Compare commits

...

33 Commits

Author SHA1 Message Date
ItzCrazyKns
8dc54efbdd feat(chat-route): lint & beautify 2025-08-21 17:48:55 +05:30
ItzCrazyKns
72f26b4370 feat(upload): save files uploaded after chat created 2025-08-21 17:47:49 +05:30
ItzCrazyKns
f680188905 feat(ollama): add ability to provide api key 2025-08-20 20:32:13 +05:30
ItzCrazyKns
0b15bfbe32 feat(app): switch to useChat hook 2025-08-20 20:21:06 +05:30
ItzCrazyKns
8fc7808654 feat(hooks): implement useChat hook 2025-08-20 20:20:50 +05:30
ItzCrazyKns
0dc17286b9 Merge branch 'pr/843' 2025-08-12 21:39:25 +05:30
ItzCrazyKns
3edd7d44dd feat(openai): conditionally set temperature 2025-08-12 21:39:14 +05:30
Samuel Dockery
1132997108 feat: Add support for latest AI models from Anthropic, Google, & OpenAI 2025-08-10 07:50:31 -07:00
ItzCrazyKns
eadbedb713 feat(groq): switch to @langchain/groq for better handling 2025-08-02 17:14:34 +05:30
ItzCrazyKns
37cd6d3ab5 feat(discover): prevent duplicate articles 2025-08-01 20:41:07 +05:30
ItzCrazyKns
88be3a045b feat(discover): randomly sort results 2025-07-29 13:18:36 +05:30
ItzCrazyKns
45b51ab156 feat(discover-api): handle topics 2025-07-29 13:17:07 +05:30
ItzCrazyKns
3bee01cfa7 feat(discover): add topic selection 2025-07-28 20:39:50 +05:30
ItzCrazyKns
567c6a8758 Merge branch 'pr/831' 2025-07-24 22:36:19 +05:30
ItzCrazyKns
81a91da743 feat(gemini): use model instead of modelName 2025-07-23 12:22:26 +05:30
ItzCrazyKns
70a61ee1eb feat(think-box): handle thinkingEnded 2025-07-23 12:21:07 +05:30
ItzCrazyKns
9d89a4413b feat(format-history): remove extra : 2025-07-23 12:20:49 +05:30
ItzCrazyKns
6ea17d54c6 feat(chat-window): fix wrong history while rewriting 2025-07-22 21:21:49 +05:30
ItzCrazyKns
11a828b073 feat(message-box): close think box after thinking process ends 2025-07-22 21:21:09 +05:30
ItzCrazyKns
37022fb11e feat(format-history): update role determination 2025-07-22 21:20:16 +05:30
ItzCrazyKns
dd50d4927b Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica 2025-07-22 12:27:11 +05:30
ItzCrazyKns
fdaf3af3af Merge pull request #832 from tuxthepenguin84/patch-2
Fix name of provider in embeddings models error message
2025-07-21 20:56:24 +05:30
Samuel Dockery
3f2a8f862c Fix name of provider in embeddings models error message 2025-07-20 09:20:39 -07:00
Samuel Dockery
58c7be6e95 Update Gemini 2.5 Models 2025-07-20 09:17:20 -07:00
ItzCrazyKns
829b4e7134 feat(custom-openai): use apiKey instead of openAIApiKey 2025-07-19 21:37:34 +05:30
ItzCrazyKns
77870b39cc feat(ollama): use @langchain/ollama library 2025-07-19 21:37:34 +05:30
ItzCrazyKns
8e0ae9b867 feat(providers): switch to apiKey key 2025-07-19 21:37:34 +05:30
ItzCrazyKns
543f1df5ce feat(modules): update langchain packages 2025-07-19 21:37:34 +05:30
ItzCrazyKns
341aae4587 Merge branch 'pr/830' 2025-07-19 21:36:23 +05:30
Willie Zutz
7f62907385 feat(weather): update measurement units to Imperial/Metric 2025-07-19 08:53:11 -06:00
ItzCrazyKns
7c4aa683a2 feat(chains): remove unused imports 2025-07-19 17:57:32 +05:30
ItzCrazyKns
b48b0eeb0e feat(imageSearch): use XML parsing, implement few shot prompting 2025-07-19 17:52:30 +05:30
ItzCrazyKns
cddc793915 feat(videoSearch): use XML parsing, use few shot prompting 2025-07-19 17:52:14 +05:30
41 changed files with 1436 additions and 1363 deletions

View File

@@ -15,11 +15,13 @@
"@headlessui/react": "^2.2.0",
"@iarna/toml": "^2.2.5",
"@icons-pack/react-simple-icons": "^12.3.0",
"@langchain/anthropic": "^0.3.15",
"@langchain/community": "^0.3.36",
"@langchain/core": "^0.3.42",
"@langchain/google-genai": "^0.1.12",
"@langchain/openai": "^0.0.25",
"@langchain/anthropic": "^0.3.24",
"@langchain/community": "^0.3.49",
"@langchain/core": "^0.3.66",
"@langchain/google-genai": "^0.2.15",
"@langchain/groq": "^0.2.3",
"@langchain/ollama": "^0.2.3",
"@langchain/openai": "^0.6.2",
"@langchain/textsplitters": "^0.1.0",
"@tailwindcss/typography": "^0.5.12",
"@xenova/transformers": "^2.17.2",
@@ -31,7 +33,7 @@
"drizzle-orm": "^0.40.1",
"html-to-text": "^9.0.5",
"jspdf": "^3.0.1",
"langchain": "^0.1.30",
"langchain": "^0.3.30",
"lucide-react": "^0.363.0",
"mammoth": "^1.9.1",
"markdown-to-jsx": "^7.7.2",

View File

@@ -1,11 +1,7 @@
import prompts from '@/lib/prompts';
import MetaSearchAgent from '@/lib/search/metaSearchAgent';
import crypto from 'crypto';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { EventEmitter } from 'stream';
import {
chatModelProviders,
embeddingModelProviders,
getAvailableChatModelProviders,
getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
@@ -138,6 +134,8 @@ const handleHistorySave = async (
where: eq(chats.id, message.chatId),
});
const fileData = files.map(getFileDetails);
if (!chat) {
await db
.insert(chats)
@@ -146,9 +144,15 @@ const handleHistorySave = async (
title: message.content,
createdAt: new Date().toString(),
focusMode: focusMode,
files: files.map(getFileDetails),
files: fileData,
})
.execute();
} else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) {
db.update(chats)
.set({
files: files.map(getFileDetails),
})
.where(eq(chats.id, message.chatId));
}
const messageExists = await db.query.messages.findFirst({
@@ -223,7 +227,7 @@ export const POST = async (req: Request) => {
if (body.chatModel?.provider === 'custom_openai') {
llm = new ChatOpenAI({
openAIApiKey: getCustomOpenaiApiKey(),
apiKey: getCustomOpenaiApiKey(),
modelName: getCustomOpenaiModelName(),
temperature: 0.7,
configuration: {
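
The same one-line migration appears in several routes below: with @langchain/openai bumped to ^0.6.2, the constructor field is apiKey rather than the older openAIApiKey alias. A sketch of the updated call; the baseURL getter is assumed, since the hunk truncates before it:

import { ChatOpenAI } from '@langchain/openai';

const llm = new ChatOpenAI({
  apiKey: getCustomOpenaiApiKey(),       // was: openAIApiKey
  modelName: getCustomOpenaiModelName(),
  temperature: 0.7,
  configuration: {
    baseURL: getCustomOpenaiApiUrl(),    // assumed getter; not shown in this hunk
  },
});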

View File

@@ -11,6 +11,7 @@ import {
getAimlApiKey,
getLMStudioApiEndpoint,
updateConfig,
getOllamaApiKey,
} from '@/lib/config';
import {
getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
config['openaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['ollamaApiKey'] = getOllamaApiKey();
config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
},
OLLAMA: {
API_URL: config.ollamaApiUrl,
API_KEY: config.ollamaApiKey,
},
DEEPSEEK: {
API_KEY: config.deepseekApiKey,

View File

@@ -1,55 +1,72 @@
import { searchSearxng } from '@/lib/searxng';
const articleWebsites = [
'yahoo.com',
'www.exchangewire.com',
'businessinsider.com',
/* 'wired.com',
'mashable.com',
'theverge.com',
'gizmodo.com',
'cnet.com',
'venturebeat.com', */
];
const websitesForTopic = {
tech: {
query: ['technology news', 'latest tech', 'AI', 'science and innovation'],
links: ['techcrunch.com', 'wired.com', 'theverge.com'],
},
finance: {
query: ['finance news', 'economy', 'stock market', 'investing'],
links: ['bloomberg.com', 'cnbc.com', 'marketwatch.com'],
},
art: {
query: ['art news', 'culture', 'modern art', 'cultural events'],
links: ['artnews.com', 'hyperallergic.com', 'theartnewspaper.com'],
},
sports: {
query: ['sports news', 'latest sports', 'cricket football tennis'],
links: ['espn.com', 'bbc.com/sport', 'skysports.com'],
},
entertainment: {
query: ['entertainment news', 'movies', 'TV shows', 'celebrities'],
links: ['hollywoodreporter.com', 'variety.com', 'deadline.com'],
},
};
const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */
type Topic = keyof typeof websitesForTopic;
export const GET = async (req: Request) => {
try {
const params = new URL(req.url).searchParams;
const mode: 'normal' | 'preview' =
(params.get('mode') as 'normal' | 'preview') || 'normal';
const topic: Topic = (params.get('topic') as Topic) || 'tech';
const selectedTopic = websitesForTopic[topic];
let data = [];
if (mode === 'normal') {
const seenUrls = new Set();
data = (
await Promise.all([
...new Array(articleWebsites.length * topics.length)
.fill(0)
.map(async (_, i) => {
await Promise.all(
selectedTopic.links.flatMap((link) =>
selectedTopic.query.map(async (query) => {
return (
await searchSearxng(
`site:${articleWebsites[i % articleWebsites.length]} ${
topics[i % topics.length]
}`,
{
await searchSearxng(`site:${link} ${query}`, {
engines: ['bing news'],
pageno: 1,
language: 'en',
},
)
})
).results;
}),
])
),
)
)
.map((result) => result)
.flat()
.filter((item) => {
const url = item.url?.toLowerCase().trim();
if (seenUrls.has(url)) return false;
seenUrls.add(url);
return true;
})
.sort(() => Math.random() - 0.5);
} else {
data = (
await searchSearxng(
`site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`,
`site:${selectedTopic.links[Math.floor(Math.random() * selectedTopic.links.length)]} ${selectedTopic.query[Math.floor(Math.random() * selectedTopic.query.length)]}`,
{
engines: ['bing news'],
pageno: 1,
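
The reworked route reads an optional topic query parameter, fans out one search per (site, query) pair for the selected topic, de-duplicates by URL, and shuffles the results. A client-side usage sketch; the response key is assumed, since the hunk ends before the route returns:

const res = await fetch('/api/discover?topic=finance'); // tech | finance | art | sports | entertainment
const { blogs } = await res.json();                     // response shape assumed, not shown in the diff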

View File

@@ -49,7 +49,7 @@ export const POST = async (req: Request) => {
if (body.chatModel?.provider === 'custom_openai') {
llm = new ChatOpenAI({
openAIApiKey: getCustomOpenaiApiKey(),
apiKey: getCustomOpenaiApiKey(),
modelName: getCustomOpenaiModelName(),
temperature: 0.7,
configuration: {

View File

@@ -81,8 +81,7 @@ export const POST = async (req: Request) => {
if (body.chatModel?.provider === 'custom_openai') {
llm = new ChatOpenAI({
modelName: body.chatModel?.name || getCustomOpenaiModelName(),
openAIApiKey:
body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
temperature: 0.7,
configuration: {
baseURL:

View File

@@ -48,7 +48,7 @@ export const POST = async (req: Request) => {
if (body.chatModel?.provider === 'custom_openai') {
llm = new ChatOpenAI({
openAIApiKey: getCustomOpenaiApiKey(),
apiKey: getCustomOpenaiApiKey(),
modelName: getCustomOpenaiModelName(),
temperature: 0.7,
configuration: {

View File

@@ -49,7 +49,7 @@ export const POST = async (req: Request) => {
if (body.chatModel?.provider === 'custom_openai') {
llm = new ChatOpenAI({
openAIApiKey: getCustomOpenaiApiKey(),
apiKey: getCustomOpenaiApiKey(),
modelName: getCustomOpenaiModelName(),
temperature: 0.7,
configuration: {

View File

@@ -1,7 +1,10 @@
export const POST = async (req: Request) => {
try {
const body: { lat: number; lng: number; temperatureUnit: 'C' | 'F' } =
await req.json();
const body: {
lat: number;
lng: number;
measureUnit: 'Imperial' | 'Metric';
} = await req.json();
if (!body.lat || !body.lng) {
return Response.json(
@@ -13,7 +16,9 @@ export const POST = async (req: Request) => {
}
const res = await fetch(
`https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${body.temperatureUnit === 'C' ? '' : '&temperature_unit=fahrenheit'}`,
`https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${
body.measureUnit === 'Metric' ? '' : '&temperature_unit=fahrenheit'
}${body.measureUnit === 'Metric' ? '' : '&wind_speed_unit=mph'}`,
);
const data = await res.json();
@@ -35,13 +40,15 @@ export const POST = async (req: Request) => {
windSpeed: number;
icon: string;
temperatureUnit: 'C' | 'F';
windSpeedUnit: 'm/s' | 'mph';
} = {
temperature: data.current.temperature_2m,
condition: '',
humidity: data.current.relative_humidity_2m,
windSpeed: data.current.wind_speed_10m,
icon: '',
temperatureUnit: body.temperatureUnit,
temperatureUnit: body.measureUnit === 'Metric' ? 'C' : 'F',
windSpeedUnit: body.measureUnit === 'Metric' ? 'm/s' : 'mph',
};
const code = data.current.weather_code;
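
The weather route now takes a single measureUnit switch in place of temperatureUnit and derives both unit labels from it: Imperial appends the Fahrenheit and mph parameters to the Open-Meteo call, while Metric leaves the API defaults in place and labels the response 'C' and 'm/s'. A request sketch with placeholder coordinates:

const res = await fetch('/api/weather', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ lat: 52.52, lng: 13.405, measureUnit: 'Metric' }), // coordinates are placeholders
});
const weather = await res.json(); // carries temperatureUnit ('C' | 'F') and windSpeedUnit ('m/s' | 'mph')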

View File

@@ -1,9 +1,17 @@
import ChatWindow from '@/components/ChatWindow';
import React from 'react';
'use client';
const Page = ({ params }: { params: Promise<{ chatId: string }> }) => {
const { chatId } = React.use(params);
return <ChatWindow id={chatId} />;
import ChatWindow from '@/components/ChatWindow';
import { useParams } from 'next/navigation';
import React from 'react';
import { ChatProvider } from '@/lib/hooks/useChat';
const Page = () => {
const { chatId }: { chatId: string } = useParams();
return (
<ChatProvider id={chatId}>
<ChatWindow />
</ChatProvider>
);
};
export default Page;
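
This page is the template for the useChat refactor that runs through the rest of the compare: a ChatProvider wraps the tree once, and every descendant reads chat state from context instead of having it threaded through props. A minimal consumer sketch, assuming useChat simply exposes that context (consistent with how the components below call it):

import { useChat } from '@/lib/hooks/useChat';

const SendButton = () => {
  const { sendMessage, loading } = useChat(); // no prop drilling
  return (
    <button disabled={loading} onClick={() => sendMessage('Hello')}>
      Send
    </button>
  );
};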

View File

@@ -4,6 +4,7 @@ import { Search } from 'lucide-react';
import { useEffect, useState } from 'react';
import Link from 'next/link';
import { toast } from 'sonner';
import { cn } from '@/lib/utils';
interface Discover {
title: string;
@@ -12,14 +13,38 @@ interface Discover {
thumbnail: string;
}
const topics: { key: string; display: string }[] = [
{
display: 'Tech & Science',
key: 'tech',
},
{
display: 'Finance',
key: 'finance',
},
{
display: 'Art & Culture',
key: 'art',
},
{
display: 'Sports',
key: 'sports',
},
{
display: 'Entertainment',
key: 'entertainment',
},
];
const Page = () => {
const [discover, setDiscover] = useState<Discover[] | null>(null);
const [loading, setLoading] = useState(true);
const [activeTopic, setActiveTopic] = useState<string>(topics[0].key);
useEffect(() => {
const fetchData = async () => {
const fetchArticles = async (topic: string) => {
setLoading(true);
try {
const res = await fetch(`/api/discover`, {
const res = await fetch(`/api/discover?topic=${topic}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -43,10 +68,39 @@ const Page = () => {
}
};
fetchData();
}, []);
useEffect(() => {
fetchArticles(activeTopic);
}, [activeTopic]);
return loading ? (
return (
<>
<div>
<div className="flex flex-col pt-4">
<div className="flex items-center">
<Search />
<h1 className="text-3xl font-medium p-2">Discover</h1>
</div>
<hr className="border-t border-[#2B2C2C] my-4 w-full" />
</div>
<div className="flex flex-row items-center space-x-2 overflow-x-auto">
{topics.map((t, i) => (
<div
key={i}
className={cn(
'border-[0.1px] rounded-full text-sm px-3 py-1 text-nowrap transition duration-200 cursor-pointer',
activeTopic === t.key
? 'text-cyan-300 bg-cyan-300/30 border-cyan-300/60'
: 'border-white/30 text-white/70 hover:text-white hover:border-white/40 hover:bg-white/5',
)}
onClick={() => setActiveTopic(t.key)}
>
<span>{t.display}</span>
</div>
))}
</div>
{loading ? (
<div className="flex flex-row items-center justify-center min-h-screen">
<svg
aria-hidden="true"
@@ -66,17 +120,7 @@ const Page = () => {
</svg>
</div>
) : (
<>
<div>
<div className="flex flex-col pt-4">
<div className="flex items-center">
<Search />
<h1 className="text-3xl font-medium p-2">Discover</h1>
</div>
<hr className="border-t border-[#2B2C2C] my-4 w-full" />
</div>
<div className="grid lg:grid-cols-3 sm:grid-cols-2 grid-cols-1 gap-4 pb-28 lg:pb-8 w-full justify-items-center lg:justify-items-start">
<div className="grid lg:grid-cols-3 sm:grid-cols-2 grid-cols-1 gap-4 pb-28 pt-5 lg:pb-8 w-full justify-items-center lg:justify-items-start">
{discover &&
discover?.map((item, i) => (
<Link
@@ -105,6 +149,7 @@ const Page = () => {
</Link>
))}
</div>
)}
</div>
</>
);

View File

@@ -1,4 +1,5 @@
import ChatWindow from '@/components/ChatWindow';
import { ChatProvider } from '@/lib/hooks/useChat';
import { Metadata } from 'next';
import { Suspense } from 'react';
@@ -11,7 +12,9 @@ const Home = () => {
return (
<div>
<Suspense>
<ChatProvider>
<ChatWindow />
</ChatProvider>
</Suspense>
</div>
);

View File

@@ -21,6 +21,7 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
ollamaApiKey: string;
lmStudioApiUrl: string;
deepseekApiKey: string;
aimlApiKey: string;
@@ -148,7 +149,9 @@ const Page = () => {
const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
const [systemInstructions, setSystemInstructions] = useState<string>('');
const [temperatureUnit, setTemperatureUnit] = useState<'C' | 'F'>('C');
const [measureUnit, setMeasureUnit] = useState<'Imperial' | 'Metric'>(
'Metric',
);
const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
useEffect(() => {
@@ -211,7 +214,9 @@ const Page = () => {
setSystemInstructions(localStorage.getItem('systemInstructions')!);
setTemperatureUnit(localStorage.getItem('temperatureUnit')! as 'C' | 'F');
setMeasureUnit(
localStorage.getItem('measureUnit')! as 'Imperial' | 'Metric',
);
setIsLoading(false);
};
@@ -371,8 +376,8 @@ const Page = () => {
localStorage.setItem('embeddingModel', value);
} else if (key === 'systemInstructions') {
localStorage.setItem('systemInstructions', value);
} else if (key === 'temperatureUnit') {
localStorage.setItem('temperatureUnit', value.toString());
} else if (key === 'measureUnit') {
localStorage.setItem('measureUnit', value.toString());
}
} catch (err) {
console.error('Failed to save:', err);
@@ -430,22 +435,22 @@ const Page = () => {
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Temperature Unit
Measurement Units
</p>
<Select
value={temperatureUnit ?? undefined}
value={measureUnit ?? undefined}
onChange={(e) => {
setTemperatureUnit(e.target.value as 'C' | 'F');
saveConfig('temperatureUnit', e.target.value);
setMeasureUnit(e.target.value as 'Imperial' | 'Metric');
saveConfig('measureUnit', e.target.value);
}}
options={[
{
label: 'Celsius',
value: 'C',
label: 'Metric',
value: 'Metric',
},
{
label: 'Fahrenheit',
value: 'F',
label: 'Imperial',
value: 'Imperial',
},
]}
/>
@@ -814,6 +819,25 @@ const Page = () => {
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Ollama API Key (Can be left blank)
</p>
<Input
type="text"
placeholder="Ollama API Key"
value={config.ollamaApiKey}
isSaving={savingStates['ollamaApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
ollamaApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('ollamaApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
GROQ API Key

View File

@@ -5,28 +5,11 @@ import MessageInput from './MessageInput';
import { File, Message } from './ChatWindow';
import MessageBox from './MessageBox';
import MessageBoxLoading from './MessageBoxLoading';
import { useChat } from '@/lib/hooks/useChat';
const Chat = () => {
const { messages, loading, messageAppeared } = useChat();
const Chat = ({
loading,
messages,
sendMessage,
messageAppeared,
rewrite,
fileIds,
setFileIds,
files,
setFiles,
}: {
messages: Message[];
sendMessage: (message: string) => void;
loading: boolean;
messageAppeared: boolean;
rewrite: (messageId: string) => void;
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
files: File[];
setFiles: (files: File[]) => void;
}) => {
const [dividerWidth, setDividerWidth] = useState(0);
const dividerRef = useRef<HTMLDivElement | null>(null);
const messageEnd = useRef<HTMLDivElement | null>(null);
@@ -72,12 +55,8 @@ const Chat = ({
key={i}
message={msg}
messageIndex={i}
history={messages}
loading={loading}
dividerRef={isLast ? dividerRef : undefined}
isLast={isLast}
rewrite={rewrite}
sendMessage={sendMessage}
/>
{!isLast && msg.role === 'assistant' && (
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
@@ -92,14 +71,7 @@ const Chat = ({
className="bottom-24 lg:bottom-10 fixed z-40"
style={{ width: dividerWidth }}
>
<MessageInput
loading={loading}
sendMessage={sendMessage}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
<MessageInput />
</div>
)}
</div>

View File

@@ -1,17 +1,13 @@
'use client';
import { useEffect, useRef, useState } from 'react';
import { Document } from '@langchain/core/documents';
import Navbar from './Navbar';
import Chat from './Chat';
import EmptyChat from './EmptyChat';
import crypto from 'crypto';
import { toast } from 'sonner';
import { useSearchParams } from 'next/navigation';
import { getSuggestions } from '@/lib/actions';
import { Settings } from 'lucide-react';
import Link from 'next/link';
import NextError from 'next/error';
import { useChat } from '@/lib/hooks/useChat';
export type Message = {
messageId: string;
@@ -29,539 +25,8 @@ export interface File {
fileId: string;
}
interface ChatModelProvider {
name: string;
provider: string;
}
interface EmbeddingModelProvider {
name: string;
provider: string;
}
const checkConfig = async (
setChatModelProvider: (provider: ChatModelProvider) => void,
setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
setIsConfigReady: (ready: boolean) => void,
setHasError: (hasError: boolean) => void,
) => {
try {
let chatModel = localStorage.getItem('chatModel');
let chatModelProvider = localStorage.getItem('chatModelProvider');
let embeddingModel = localStorage.getItem('embeddingModel');
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (!autoImageSearch) {
localStorage.setItem('autoImageSearch', 'true');
}
if (!autoVideoSearch) {
localStorage.setItem('autoVideoSearch', 'false');
}
const providers = await fetch(`/api/models`, {
headers: {
'Content-Type': 'application/json',
},
}).then(async (res) => {
if (!res.ok)
throw new Error(
`Failed to fetch models: ${res.status} ${res.statusText}`,
);
return res.json();
});
if (
!chatModel ||
!chatModelProvider ||
!embeddingModel ||
!embeddingModelProvider
) {
if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider =
chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
}
if (!embeddingModel || !embeddingModelProvider) {
const embeddingModelProviders = providers.embeddingModelProviders;
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
)
return toast.error('No embedding models available');
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
}
localStorage.setItem('chatModel', chatModel!);
localStorage.setItem('chatModelProvider', chatModelProvider);
localStorage.setItem('embeddingModel', embeddingModel!);
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
} else {
const chatModelProviders = providers.chatModelProviders;
const embeddingModelProviders = providers.embeddingModelProviders;
if (
Object.keys(chatModelProviders).length > 0 &&
(!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
chatModelProvidersKeys.find(
(key) => Object.keys(chatModelProviders[key]).length > 0,
) || chatModelProvidersKeys[0];
localStorage.setItem('chatModelProvider', chatModelProvider);
}
if (
chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel]
) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(
chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0
? chatModelProvider
: Object.keys(chatModelProviders)[0]
],
)[0];
localStorage.setItem('chatModel', chatModel);
}
if (
Object.keys(embeddingModelProviders).length > 0 &&
!embeddingModelProviders[embeddingModelProvider]
) {
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
}
if (
embeddingModelProvider &&
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
) {
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
localStorage.setItem('embeddingModel', embeddingModel);
}
}
setChatModelProvider({
name: chatModel!,
provider: chatModelProvider,
});
setEmbeddingModelProvider({
name: embeddingModel!,
provider: embeddingModelProvider,
});
setIsConfigReady(true);
} catch (err) {
console.error('An error occurred while checking the configuration:', err);
setIsConfigReady(false);
setHasError(true);
}
};
const loadMessages = async (
chatId: string,
setMessages: (messages: Message[]) => void,
setIsMessagesLoaded: (loaded: boolean) => void,
setChatHistory: (history: [string, string][]) => void,
setFocusMode: (mode: string) => void,
setNotFound: (notFound: boolean) => void,
setFiles: (files: File[]) => void,
setFileIds: (fileIds: string[]) => void,
) => {
const res = await fetch(`/api/chats/${chatId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (res.status === 404) {
setNotFound(true);
setIsMessagesLoaded(true);
return;
}
const data = await res.json();
const messages = data.messages.map((msg: any) => {
return {
...msg,
...JSON.parse(msg.metadata),
};
}) as Message[];
setMessages(messages);
const history = messages.map((msg) => {
return [msg.role, msg.content];
}) as [string, string][];
console.debug(new Date(), 'app:messages_loaded');
document.title = messages[0].content;
const files = data.chat.files.map((file: any) => {
return {
fileName: file.name,
fileExtension: file.name.split('.').pop(),
fileId: file.fileId,
};
});
setFiles(files);
setFileIds(files.map((file: File) => file.fileId));
setChatHistory(history);
setFocusMode(data.chat.focusMode);
setIsMessagesLoaded(true);
};
const ChatWindow = ({ id }: { id?: string }) => {
const searchParams = useSearchParams();
const initialMessage = searchParams.get('q');
const [chatId, setChatId] = useState<string | undefined>(id);
const [newChatCreated, setNewChatCreated] = useState(false);
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
{
name: '',
provider: '',
},
);
const [embeddingModelProvider, setEmbeddingModelProvider] =
useState<EmbeddingModelProvider>({
name: '',
provider: '',
});
const [isConfigReady, setIsConfigReady] = useState(false);
const [hasError, setHasError] = useState(false);
const [isReady, setIsReady] = useState(false);
useEffect(() => {
checkConfig(
setChatModelProvider,
setEmbeddingModelProvider,
setIsConfigReady,
setHasError,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const [loading, setLoading] = useState(false);
const [messageAppeared, setMessageAppeared] = useState(false);
const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
const [messages, setMessages] = useState<Message[]>([]);
const [files, setFiles] = useState<File[]>([]);
const [fileIds, setFileIds] = useState<string[]>([]);
const [focusMode, setFocusMode] = useState('webSearch');
const [optimizationMode, setOptimizationMode] = useState('speed');
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
const [notFound, setNotFound] = useState(false);
useEffect(() => {
if (
chatId &&
!newChatCreated &&
!isMessagesLoaded &&
messages.length === 0
) {
loadMessages(
chatId,
setMessages,
setIsMessagesLoaded,
setChatHistory,
setFocusMode,
setNotFound,
setFiles,
setFileIds,
);
} else if (!chatId) {
setNewChatCreated(true);
setIsMessagesLoaded(true);
setChatId(crypto.randomBytes(20).toString('hex'));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const messagesRef = useRef<Message[]>([]);
useEffect(() => {
messagesRef.current = messages;
}, [messages]);
useEffect(() => {
if (isMessagesLoaded && isConfigReady) {
setIsReady(true);
console.debug(new Date(), 'app:ready');
} else {
setIsReady(false);
}
}, [isMessagesLoaded, isConfigReady]);
const sendMessage = async (message: string, messageId?: string) => {
if (loading) return;
if (!isConfigReady) {
toast.error('Cannot send message before the configuration is ready');
return;
}
setLoading(true);
setMessageAppeared(false);
let sources: Document[] | undefined = undefined;
let recievedMessage = '';
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
setMessages((prevMessages) => [
...prevMessages,
{
content: message,
messageId: messageId,
chatId: chatId!,
role: 'user',
createdAt: new Date(),
},
]);
const messageHandler = async (data: any) => {
if (data.type === 'error') {
toast.error(data.data);
setLoading(false);
return;
}
if (data.type === 'sources') {
sources = data.data;
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: '',
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessageAppeared(true);
}
if (data.type === 'message') {
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: data.data,
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessages((prev) =>
prev.map((message) => {
if (message.messageId === data.messageId) {
return { ...message, content: message.content + data.data };
}
return message;
}),
);
recievedMessage += data.data;
setMessageAppeared(true);
}
if (data.type === 'messageEnd') {
setChatHistory((prevHistory) => [
...prevHistory,
['human', message],
['assistant', recievedMessage],
]);
setLoading(false);
const lastMsg = messagesRef.current[messagesRef.current.length - 1];
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (autoImageSearch === 'true') {
document
.getElementById(`search-images-${lastMsg.messageId}`)
?.click();
}
if (autoVideoSearch === 'true') {
document
.getElementById(`search-videos-${lastMsg.messageId}`)
?.click();
}
if (
lastMsg.role === 'assistant' &&
lastMsg.sources &&
lastMsg.sources.length > 0 &&
!lastMsg.suggestions
) {
const suggestions = await getSuggestions(messagesRef.current);
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === lastMsg.messageId) {
return { ...msg, suggestions: suggestions };
}
return msg;
}),
);
}
}
};
const res = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: message,
message: {
messageId: messageId,
chatId: chatId!,
content: message,
},
chatId: chatId!,
files: fileIds,
focusMode: focusMode,
optimizationMode: optimizationMode,
history: chatHistory,
chatModel: {
name: chatModelProvider.name,
provider: chatModelProvider.provider,
},
embeddingModel: {
name: embeddingModelProvider.name,
provider: embeddingModelProvider.provider,
},
systemInstructions: localStorage.getItem('systemInstructions'),
}),
});
if (!res.body) throw new Error('No response body');
const reader = res.body?.getReader();
const decoder = new TextDecoder('utf-8');
let partialChunk = '';
while (true) {
const { value, done } = await reader.read();
if (done) break;
partialChunk += decoder.decode(value, { stream: true });
try {
const messages = partialChunk.split('\n');
for (const msg of messages) {
if (!msg.trim()) continue;
const json = JSON.parse(msg);
messageHandler(json);
}
partialChunk = '';
} catch (error) {
console.warn('Incomplete JSON, waiting for next chunk...');
}
}
};
const rewrite = (messageId: string) => {
const index = messages.findIndex((msg) => msg.messageId === messageId);
if (index === -1) return;
const message = messages[index - 1];
setMessages((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
setChatHistory((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
sendMessage(message.content, message.messageId);
};
useEffect(() => {
if (isReady && initialMessage && isConfigReady) {
sendMessage(initialMessage);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]);
const ChatWindow = () => {
const { hasError, isReady, notFound, messages } = useChat();
if (hasError) {
return (
<div className="relative">
@@ -586,31 +51,11 @@ const ChatWindow = ({ id }: { id?: string }) => {
<div>
{messages.length > 0 ? (
<>
<Navbar chatId={chatId!} messages={messages} />
<Chat
loading={loading}
messages={messages}
sendMessage={sendMessage}
messageAppeared={messageAppeared}
rewrite={rewrite}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
<Navbar />
<Chat />
</>
) : (
<EmptyChat
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
<EmptyChat />
)}
</div>
)

View File

@@ -5,27 +5,7 @@ import Link from 'next/link';
import WeatherWidget from './WeatherWidget';
import NewsArticleWidget from './NewsArticleWidget';
const EmptyChat = ({
sendMessage,
focusMode,
setFocusMode,
optimizationMode,
setOptimizationMode,
fileIds,
setFileIds,
files,
setFiles,
}: {
sendMessage: (message: string) => void;
focusMode: string;
setFocusMode: (mode: string) => void;
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
files: File[];
setFiles: (files: File[]) => void;
}) => {
const EmptyChat = () => {
return (
<div className="relative">
<div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
@@ -38,17 +18,7 @@ const EmptyChat = ({
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
Research begins here.
</h2>
<EmptyChatMessageInput
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
<EmptyChatMessageInput />
</div>
<div className="flex flex-col w-full gap-4 mt-2 sm:flex-row sm:justify-center">
<div className="flex-1 w-full">

View File

@@ -1,34 +1,15 @@
import { ArrowRight } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import CopilotToggle from './MessageInputActions/Copilot';
import Focus from './MessageInputActions/Focus';
import Optimization from './MessageInputActions/Optimization';
import Attach from './MessageInputActions/Attach';
import { File } from './ChatWindow';
import { useChat } from '@/lib/hooks/useChat';
const EmptyChatMessageInput = ({
sendMessage,
focusMode,
setFocusMode,
optimizationMode,
setOptimizationMode,
fileIds,
setFileIds,
files,
setFiles,
}: {
sendMessage: (message: string) => void;
focusMode: string;
setFocusMode: (mode: string) => void;
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
files: File[];
setFiles: (files: File[]) => void;
}) => {
const [copilotEnabled, setCopilotEnabled] = useState(false);
const EmptyChatMessageInput = () => {
const { sendMessage } = useChat();
/* const [copilotEnabled, setCopilotEnabled] = useState(false); */
const [message, setMessage] = useState('');
const inputRef = useRef<HTMLTextAreaElement | null>(null);
@@ -84,20 +65,11 @@ const EmptyChatMessageInput = ({
/>
<div className="flex flex-row items-center justify-between mt-4">
<div className="flex flex-row items-center space-x-2 lg:space-x-4">
<Focus focusMode={focusMode} setFocusMode={setFocusMode} />
<Attach
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
showText
/>
<Focus />
<Attach showText />
</div>
<div className="flex flex-row items-center space-x-1 sm:space-x-4">
<Optimization
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
/>
<Optimization />
<button
disabled={message.trim().length === 0}
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 disabled:bg-[#e0e0dc] dark:disabled:bg-[#ececec21] hover:bg-opacity-85 transition duration-100 rounded-full p-2"

View File

@@ -20,32 +20,36 @@ import SearchImages from './SearchImages';
import SearchVideos from './SearchVideos';
import { useSpeech } from 'react-text-to-speech';
import ThinkBox from './ThinkBox';
import { useChat } from '@/lib/hooks/useChat';
const ThinkTagProcessor = ({ children }: { children: React.ReactNode }) => {
return <ThinkBox content={children as string} />;
const ThinkTagProcessor = ({
children,
thinkingEnded,
}: {
children: React.ReactNode;
thinkingEnded: boolean;
}) => {
return (
<ThinkBox content={children as string} thinkingEnded={thinkingEnded} />
);
};
const MessageBox = ({
message,
messageIndex,
history,
loading,
dividerRef,
isLast,
rewrite,
sendMessage,
}: {
message: Message;
messageIndex: number;
history: Message[];
loading: boolean;
dividerRef?: MutableRefObject<HTMLDivElement | null>;
isLast: boolean;
rewrite: (messageId: string) => void;
sendMessage: (message: string) => void;
}) => {
const { loading, messages: history, sendMessage, rewrite } = useChat();
const [parsedMessage, setParsedMessage] = useState(message.content);
const [speechMessage, setSpeechMessage] = useState(message.content);
const [thinkingEnded, setThinkingEnded] = useState(false);
useEffect(() => {
const citationRegex = /\[([^\]]+)\]/g;
@@ -61,6 +65,10 @@ const MessageBox = ({
}
}
if (message.role === 'assistant' && message.content.includes('</think>')) {
setThinkingEnded(true);
}
if (
message.role === 'assistant' &&
message?.sources &&
@@ -88,7 +96,7 @@ const MessageBox = ({
if (url) {
return `<a href="${url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${numStr}</a>`;
} else {
return `[${numStr}]`;
return ``;
}
})
.join('');
@@ -99,6 +107,14 @@ const MessageBox = ({
);
setSpeechMessage(message.content.replace(regex, ''));
return;
} else if (
message.role === 'assistant' &&
message?.sources &&
message.sources.length === 0
) {
setParsedMessage(processedMessage.replace(regex, ''));
setSpeechMessage(message.content.replace(regex, ''));
return;
}
setSpeechMessage(message.content.replace(regex, ''));
@@ -111,6 +127,9 @@ const MessageBox = ({
overrides: {
think: {
component: ThinkTagProcessor,
props: {
thinkingEnded: thinkingEnded,
},
},
},
};
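
For context, these overrides feed markdown-to-jsx, which maps the custom <think> tag in the streamed markdown onto the ThinkBox component; the new props entry threads thinkingEnded through. A minimal sketch of the mechanism, assuming markdown-to-jsx's documented overrides option:

import Markdown from 'markdown-to-jsx';

// ThinkTagProcessor is defined earlier in this file.
const Answer = ({ text, thinkingEnded }: { text: string; thinkingEnded: boolean }) => (
  <Markdown
    options={{
      overrides: {
        think: { component: ThinkTagProcessor, props: { thinkingEnded } }, // handles <think>…</think>
      },
    }}
  >
    {text}
  </Markdown>
);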

View File

@@ -6,22 +6,11 @@ import Attach from './MessageInputActions/Attach';
import CopilotToggle from './MessageInputActions/Copilot';
import { File } from './ChatWindow';
import AttachSmall from './MessageInputActions/AttachSmall';
import { useChat } from '@/lib/hooks/useChat';
const MessageInput = () => {
const { loading, sendMessage } = useChat();
const MessageInput = ({
sendMessage,
loading,
fileIds,
setFileIds,
files,
setFiles,
}: {
sendMessage: (message: string) => void;
loading: boolean;
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
files: File[];
setFiles: (files: File[]) => void;
}) => {
const [copilotEnabled, setCopilotEnabled] = useState(false);
const [message, setMessage] = useState('');
const [textareaRows, setTextareaRows] = useState(1);
@@ -79,14 +68,7 @@ const MessageInput = ({
mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full',
)}
>
{mode === 'single' && (
<AttachSmall
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
)}
{mode === 'single' && <AttachSmall />}
<TextareaAutosize
ref={inputRef}
value={message}
@@ -113,12 +95,7 @@ const MessageInput = ({
)}
{mode === 'multi' && (
<div className="flex flex-row items-center justify-between w-full pt-2">
<AttachSmall
fileIds={fileIds}
setFileIds={setFileIds}
files={files}
setFiles={setFiles}
/>
<AttachSmall />
<div className="flex flex-row items-center space-x-4">
<CopilotToggle
copilotEnabled={copilotEnabled}

View File

@@ -8,20 +8,11 @@ import {
import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
import { Fragment, useRef, useState } from 'react';
import { File as FileType } from '../ChatWindow';
import { useChat } from '@/lib/hooks/useChat';
const Attach = ({ showText }: { showText?: boolean }) => {
const { files, setFiles, setFileIds, fileIds } = useChat();
const Attach = ({
fileIds,
setFileIds,
showText,
files,
setFiles,
}: {
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
showText?: boolean;
files: FileType[];
setFiles: (files: FileType[]) => void;
}) => {
const [loading, setLoading] = useState(false);
const fileInputRef = useRef<any>();

View File

@@ -8,18 +8,11 @@ import {
import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
import { Fragment, useRef, useState } from 'react';
import { File as FileType } from '../ChatWindow';
import { useChat } from '@/lib/hooks/useChat';
const AttachSmall = () => {
const { files, setFiles, setFileIds, fileIds } = useChat();
const AttachSmall = ({
fileIds,
setFileIds,
files,
setFiles,
}: {
fileIds: string[];
setFileIds: (fileIds: string[]) => void;
files: FileType[];
setFiles: (files: FileType[]) => void;
}) => {
const [loading, setLoading] = useState(false);
const fileInputRef = useRef<any>();

View File

@@ -15,6 +15,7 @@ import {
} from '@headlessui/react';
import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
import { Fragment } from 'react';
import { useChat } from '@/lib/hooks/useChat';
const focusModes = [
{
@@ -55,13 +56,9 @@ const focusModes = [
},
];
const Focus = ({
focusMode,
setFocusMode,
}: {
focusMode: string;
setFocusMode: (mode: string) => void;
}) => {
const Focus = () => {
const { focusMode, setFocusMode } = useChat();
return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
<PopoverButton

View File

@@ -7,6 +7,7 @@ import {
Transition,
} from '@headlessui/react';
import { Fragment } from 'react';
import { useChat } from '@/lib/hooks/useChat';
const OptimizationModes = [
{
@@ -34,13 +35,9 @@ const OptimizationModes = [
},
];
const Optimization = ({
optimizationMode,
setOptimizationMode,
}: {
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
}) => {
const Optimization = () => {
const { optimizationMode, setOptimizationMode } = useChat();
return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton

View File

@@ -10,6 +10,7 @@ import {
Transition,
} from '@headlessui/react';
import jsPDF from 'jspdf';
import { useChat } from '@/lib/hooks/useChat';
const downloadFile = (filename: string, content: string, type: string) => {
const blob = new Blob([content], { type });
@@ -118,16 +119,12 @@ const exportAsPDF = (messages: Message[], title: string) => {
doc.save(`${title || 'chat'}.pdf`);
};
const Navbar = ({
chatId,
messages,
}: {
messages: Message[];
chatId: string;
}) => {
const Navbar = () => {
const [title, setTitle] = useState<string>('');
const [timeAgo, setTimeAgo] = useState<string>('');
const { messages, chatId } = useChat();
useEffect(() => {
if (messages.length > 0) {
const newTitle =
@@ -206,7 +203,7 @@ const Navbar = ({
</PopoverPanel>
</Transition>
</Popover>
<DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
<DeleteChat redirect chatId={chatId!} chats={[]} setChats={() => {}} />
</div>
</div>
);

View File

@@ -1,15 +1,23 @@
'use client';
import { useState } from 'react';
import { cn } from '@/lib/utils';
import { useEffect, useState } from 'react';
import { ChevronDown, ChevronUp, BrainCircuit } from 'lucide-react';
interface ThinkBoxProps {
content: string;
thinkingEnded: boolean;
}
const ThinkBox = ({ content }: ThinkBoxProps) => {
const [isExpanded, setIsExpanded] = useState(false);
const ThinkBox = ({ content, thinkingEnded }: ThinkBoxProps) => {
const [isExpanded, setIsExpanded] = useState(true);
useEffect(() => {
if (thinkingEnded) {
setIsExpanded(false);
} else {
setIsExpanded(true);
}
}, [thinkingEnded]);
return (
<div className="my-4 bg-light-secondary/50 dark:bg-dark-secondary/50 rounded-xl border border-light-200 dark:border-dark-200 overflow-hidden">

View File

@@ -10,6 +10,7 @@ const WeatherWidget = () => {
windSpeed: 0,
icon: '',
temperatureUnit: 'C',
windSpeedUnit: 'm/s',
});
const [loading, setLoading] = useState(true);
@@ -75,7 +76,7 @@ const WeatherWidget = () => {
body: JSON.stringify({
lat: location.latitude,
lng: location.longitude,
temperatureUnit: localStorage.getItem('temperatureUnit') ?? 'C',
measureUnit: localStorage.getItem('measureUnit') ?? 'Metric',
}),
});
@@ -95,6 +96,7 @@ const WeatherWidget = () => {
windSpeed: data.windSpeed,
icon: data.icon,
temperatureUnit: data.temperatureUnit,
windSpeedUnit: data.windSpeedUnit,
});
setLoading(false);
});
@@ -139,7 +141,7 @@ const WeatherWidget = () => {
</span>
<span className="flex items-center text-xs text-black/60 dark:text-white/60">
<Wind className="w-3 h-3 mr-1" />
{data.windSpeed} km/h
{data.windSpeed} {data.windSpeedUnit}
</span>
</div>
<span className="text-xs text-black/60 dark:text-white/60 mt-1">

View File

@@ -3,32 +3,18 @@ import {
RunnableMap,
RunnableLambda,
} from '@langchain/core/runnables';
import { PromptTemplate } from '@langchain/core/prompts';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';
const imageSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Example:
1. Follow up question: What is a cat?
Rephrased: A cat
2. Follow up question: What is a car? How does it works?
Rephrased: Car working
3. Follow up question: How does an AC work?
Rephrased: AC working
Conversation:
{chat_history}
Follow up question: {query}
Rephrased question:
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;
type ImageSearchChainInput = {
@@ -54,12 +40,39 @@ const createImageSearchChain = (llm: BaseChatModel) => {
return input.query;
},
}),
PromptTemplate.fromTemplate(imageSearchChainPrompt),
ChatPromptTemplate.fromMessages([
['system', imageSearchChainPrompt],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
],
['assistant', '<query>A cat</query>'],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
],
['assistant', '<query>Car working</query>'],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
],
['assistant', '<query>AC working</query>'],
[
'user',
'<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
],
]),
llm,
strParser,
RunnableLambda.from(async (input: string) => {
input = input.replace(/<think>.*?<\/think>/g, '');
const queryParser = new LineOutputParser({
key: 'query',
});
return await queryParser.parse(input);
}),
RunnableLambda.from(async (input: string) => {
const res = await searchSearxng(input, {
engines: ['bing images', 'google images'],
});
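
Both search chains follow the same pattern: replace the single PromptTemplate with a ChatPromptTemplate holding few-shot user/assistant turns, strip any <think>…</think> block from the reply, and extract the query from the <query> element. A sketch of that parsing stage; LineOutputParser is the repo's own helper, its behavior assumed from the usage here:

import LineOutputParser from '../outputParsers/lineOutputParser';

const extractQuery = async (raw: string) => {
  const cleaned = raw.replace(/<think>.*?<\/think>/g, ''); // drop reasoning tokens first
  return new LineOutputParser({ key: 'query' }).parse(cleaned); // text inside <query>…</query>
};

// extractQuery('<think>…</think><query>AC working</query>') resolves to 'AC working'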

View File

@@ -3,32 +3,18 @@ import {
RunnableMap,
RunnableLambda,
} from '@langchain/core/runnables';
import { PromptTemplate } from '@langchain/core/prompts';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';
const VideoSearchChainPrompt = `
const videoSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Example:
1. Follow up question: How does a car work?
Rephrased: How does a car work?
2. Follow up question: What is the theory of relativity?
Rephrased: What is theory of relativity
3. Follow up question: How does an AC work?
Rephrased: How does an AC work
Conversation:
{chat_history}
Follow up question: {query}
Rephrased question:
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;
type VideoSearchChainInput = {
@@ -55,12 +41,37 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
return input.query;
},
}),
PromptTemplate.fromTemplate(VideoSearchChainPrompt),
ChatPromptTemplate.fromMessages([
['system', videoSearchChainPrompt],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
],
['assistant', '<query>How does a car work?</query>'],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
],
['assistant', '<query>Theory of relativity</query>'],
[
'user',
'<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
],
['assistant', '<query>AC working</query>'],
[
'user',
'<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
],
]),
llm,
strParser,
RunnableLambda.from(async (input: string) => {
input = input.replace(/<think>.*?<\/think>/g, '');
const queryParser = new LineOutputParser({
key: 'query',
});
return await queryParser.parse(input);
}),
RunnableLambda.from(async (input: string) => {
const res = await searchSearxng(input, {
engines: ['youtube'],
});
@@ -92,8 +103,8 @@ const handleVideoSearch = (
input: VideoSearchChainInput,
llm: BaseChatModel,
) => {
const VideoSearchChain = createVideoSearchChain(llm);
return VideoSearchChain.invoke(input);
const videoSearchChain = createVideoSearchChain(llm);
return videoSearchChain.invoke(input);
};
export default handleVideoSearch;

View File

@@ -31,6 +31,7 @@ interface Config {
};
OLLAMA: {
API_URL: string;
API_KEY: string;
};
DEEPSEEK: {
API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;

src/lib/hooks/useChat.tsx (new file, 643 lines added)
View File

@@ -0,0 +1,643 @@
'use client';
import { Message } from '@/components/ChatWindow';
import { createContext, useContext, useEffect, useRef, useState } from 'react';
import crypto from 'crypto';
import { useSearchParams } from 'next/navigation';
import { toast } from 'sonner';
import { Document } from '@langchain/core/documents';
import { getSuggestions } from '../actions';
type ChatContext = {
messages: Message[];
chatHistory: [string, string][];
files: File[];
fileIds: string[];
focusMode: string;
chatId: string | undefined;
optimizationMode: string;
isMessagesLoaded: boolean;
loading: boolean;
notFound: boolean;
messageAppeared: boolean;
isReady: boolean;
hasError: boolean;
setOptimizationMode: (mode: string) => void;
setFocusMode: (mode: string) => void;
setFiles: (files: File[]) => void;
setFileIds: (fileIds: string[]) => void;
sendMessage: (
message: string,
messageId?: string,
rewrite?: boolean,
) => Promise<void>;
rewrite: (messageId: string) => void;
};
export interface File {
fileName: string;
fileExtension: string;
fileId: string;
}
interface ChatModelProvider {
name: string;
provider: string;
}
interface EmbeddingModelProvider {
name: string;
provider: string;
}
const checkConfig = async (
setChatModelProvider: (provider: ChatModelProvider) => void,
setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
setIsConfigReady: (ready: boolean) => void,
setHasError: (hasError: boolean) => void,
) => {
try {
let chatModel = localStorage.getItem('chatModel');
let chatModelProvider = localStorage.getItem('chatModelProvider');
let embeddingModel = localStorage.getItem('embeddingModel');
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (!autoImageSearch) {
localStorage.setItem('autoImageSearch', 'true');
}
if (!autoVideoSearch) {
localStorage.setItem('autoVideoSearch', 'false');
}
const providers = await fetch(`/api/models`, {
headers: {
'Content-Type': 'application/json',
},
}).then(async (res) => {
if (!res.ok)
throw new Error(
`Failed to fetch models: ${res.status} ${res.statusText}`,
);
return res.json();
});
if (
!chatModel ||
!chatModelProvider ||
!embeddingModel ||
!embeddingModelProvider
) {
if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider =
chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
}
if (!embeddingModel || !embeddingModelProvider) {
const embeddingModelProviders = providers.embeddingModelProviders;
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
)
return toast.error('No embedding models available');
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
}
localStorage.setItem('chatModel', chatModel!);
localStorage.setItem('chatModelProvider', chatModelProvider);
localStorage.setItem('embeddingModel', embeddingModel!);
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
} else {
const chatModelProviders = providers.chatModelProviders;
const embeddingModelProviders = providers.embeddingModelProviders;
if (
Object.keys(chatModelProviders).length > 0 &&
(!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
chatModelProvidersKeys.find(
(key) => Object.keys(chatModelProviders[key]).length > 0,
) || chatModelProvidersKeys[0];
localStorage.setItem('chatModelProvider', chatModelProvider);
}
if (
chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel]
) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(
chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0
? chatModelProvider
: Object.keys(chatModelProviders)[0]
],
)[0];
localStorage.setItem('chatModel', chatModel);
}
if (
Object.keys(embeddingModelProviders).length > 0 &&
!embeddingModelProviders[embeddingModelProvider]
) {
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
}
if (
embeddingModelProvider &&
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
) {
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
localStorage.setItem('embeddingModel', embeddingModel);
}
}
setChatModelProvider({
name: chatModel!,
provider: chatModelProvider,
});
setEmbeddingModelProvider({
name: embeddingModel!,
provider: embeddingModelProvider,
});
setIsConfigReady(true);
} catch (err) {
console.error('An error occurred while checking the configuration:', err);
setIsConfigReady(false);
setHasError(true);
}
};
const loadMessages = async (
chatId: string,
setMessages: (messages: Message[]) => void,
setIsMessagesLoaded: (loaded: boolean) => void,
setChatHistory: (history: [string, string][]) => void,
setFocusMode: (mode: string) => void,
setNotFound: (notFound: boolean) => void,
setFiles: (files: File[]) => void,
setFileIds: (fileIds: string[]) => void,
) => {
const res = await fetch(`/api/chats/${chatId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (res.status === 404) {
setNotFound(true);
setIsMessagesLoaded(true);
return;
}
const data = await res.json();
const messages = data.messages.map((msg: any) => {
return {
...msg,
...JSON.parse(msg.metadata),
};
}) as Message[];
setMessages(messages);
const history = messages.map((msg) => {
return [msg.role, msg.content];
}) as [string, string][];
console.debug(new Date(), 'app:messages_loaded');
// Guard against an empty chat before using the first message as the title.
if (messages.length > 0) document.title = messages[0].content;
const files = data.chat.files.map((file: any) => {
return {
fileName: file.name,
fileExtension: file.name.split('.').pop(),
fileId: file.fileId,
};
});
setFiles(files);
setFileIds(files.map((file: File) => file.fileId));
setChatHistory(history);
setFocusMode(data.chat.focusMode);
setIsMessagesLoaded(true);
};
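
// Default context value; ChatProvider below supplies the live one.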
export const chatContext = createContext<ChatContext>({
chatHistory: [],
chatId: '',
fileIds: [],
files: [],
focusMode: '',
hasError: false,
isMessagesLoaded: false,
isReady: false,
loading: false,
messageAppeared: false,
messages: [],
notFound: false,
optimizationMode: '',
rewrite: () => {},
sendMessage: async () => {},
setFileIds: () => {},
setFiles: () => {},
setFocusMode: () => {},
setOptimizationMode: () => {},
});
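
/**
 * Holds all chat state (messages, history, files, focus and optimization
 * modes, selected models) and exposes it via chatContext. When no chat id
 * is passed, a fresh random id is generated and a new chat is started.
 */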
export const ChatProvider = ({
children,
id,
}: {
children: React.ReactNode;
id?: string;
}) => {
const searchParams = useSearchParams();
const initialMessage = searchParams.get('q');
const [chatId, setChatId] = useState<string | undefined>(id);
const [newChatCreated, setNewChatCreated] = useState(false);
const [loading, setLoading] = useState(false);
const [messageAppeared, setMessageAppeared] = useState(false);
const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
const [messages, setMessages] = useState<Message[]>([]);
const [files, setFiles] = useState<File[]>([]);
const [fileIds, setFileIds] = useState<string[]>([]);
const [focusMode, setFocusMode] = useState('webSearch');
const [optimizationMode, setOptimizationMode] = useState('speed');
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
const [notFound, setNotFound] = useState(false);
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
{
name: '',
provider: '',
},
);
const [embeddingModelProvider, setEmbeddingModelProvider] =
useState<EmbeddingModelProvider>({
name: '',
provider: '',
});
const [isConfigReady, setIsConfigReady] = useState(false);
const [hasError, setHasError] = useState(false);
const [isReady, setIsReady] = useState(false);
const messagesRef = useRef<Message[]>([]);
useEffect(() => {
checkConfig(
setChatModelProvider,
setEmbeddingModelProvider,
setIsConfigReady,
setHasError,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
if (
chatId &&
!newChatCreated &&
!isMessagesLoaded &&
messages.length === 0
) {
loadMessages(
chatId,
setMessages,
setIsMessagesLoaded,
setChatHistory,
setFocusMode,
setNotFound,
setFiles,
setFileIds,
);
} else if (!chatId) {
setNewChatCreated(true);
setIsMessagesLoaded(true);
setChatId(crypto.randomBytes(20).toString('hex'));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
messagesRef.current = messages;
}, [messages]);
useEffect(() => {
if (isMessagesLoaded && isConfigReady) {
setIsReady(true);
console.debug(new Date(), 'app:ready');
} else {
setIsReady(false);
}
}, [isMessagesLoaded, isConfigReady]);
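
// Truncates the conversation back to just before the rewritten answer's
// user prompt, then resends that prompt.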
const rewrite = (messageId: string) => {
const index = messages.findIndex((msg) => msg.messageId === messageId);

// There must be a preceding user message to resend.
if (index < 1) return;

const message = messages[index - 1];
setMessages((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
setChatHistory((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
sendMessage(message.content, message.messageId, true);
};
useEffect(() => {
// Send the ?q= query-param message once both the configuration and any
// persisted messages have finished loading.
if (isReady && isConfigReady && initialMessage) {
sendMessage(initialMessage);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]);
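
// Appends the user's message locally, POSTs it to /api/chat, and consumes
// the newline-delimited JSON stream that the route answers with.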
const sendMessage: ChatContext['sendMessage'] = async (
message,
messageId,
rewrite = false,
) => {
if (loading) return;
setLoading(true);
setMessageAppeared(false);
let sources: Document[] | undefined = undefined;
let receivedMessage = '';
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
setMessages((prevMessages) => [
...prevMessages,
{
content: message,
messageId: messageId,
chatId: chatId!,
role: 'user',
createdAt: new Date(),
},
]);
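
// Dispatches a single parsed stream event: 'error', 'sources', an
// incremental 'message' chunk, or the closing 'messageEnd' bookkeeping.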
const messageHandler = async (data: any) => {
if (data.type === 'error') {
toast.error(data.data);
setLoading(false);
return;
}
if (data.type === 'sources') {
sources = data.data;
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: '',
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessageAppeared(true);
}
if (data.type === 'message') {
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: data.data,
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessages((prev) =>
prev.map((msg) =>
msg.messageId === data.messageId
? { ...msg, content: msg.content + data.data }
: msg,
),
);
receivedMessage += data.data;
setMessageAppeared(true);
}
if (data.type === 'messageEnd') {
setChatHistory((prevHistory) => [
...prevHistory,
['human', message],
['assistant', receivedMessage],
]);
setLoading(false);
const lastMsg = messagesRef.current[messagesRef.current.length - 1];
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (autoImageSearch === 'true') {
document
.getElementById(`search-images-${lastMsg.messageId}`)
?.click();
}
if (autoVideoSearch === 'true') {
document
.getElementById(`search-videos-${lastMsg.messageId}`)
?.click();
}
if (
lastMsg.role === 'assistant' &&
lastMsg.sources &&
lastMsg.sources.length > 0 &&
!lastMsg.suggestions
) {
const suggestions = await getSuggestions(messagesRef.current);
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === lastMsg.messageId) {
return { ...msg, suggestions: suggestions };
}
return msg;
}),
);
}
}
};
const messageIndex = messages.findIndex((m) => m.messageId === messageId);
const res = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: message,
message: {
messageId: messageId,
chatId: chatId!,
content: message,
},
chatId: chatId!,
files: fileIds,
focusMode: focusMode,
optimizationMode: optimizationMode,
history: rewrite
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
: chatHistory,
chatModel: {
name: chatModelProvider.name,
provider: chatModelProvider.provider,
},
embeddingModel: {
name: embeddingModelProvider.name,
provider: embeddingModelProvider.provider,
},
systemInstructions: localStorage.getItem('systemInstructions'),
}),
});
if (!res.body) throw new Error('No response body');

const reader = res.body.getReader();
const decoder = new TextDecoder('utf-8');

// The route streams newline-delimited JSON: every complete line is one
// event. Buffer the trailing partial line until its newline arrives, so
// a slow chunk can't cause already-handled events to be parsed twice.
let partialChunk = '';

while (true) {
const { value, done } = await reader.read();
if (done) break;

partialChunk += decoder.decode(value, { stream: true });

// Everything before the last newline is complete; keep the remainder.
const lines = partialChunk.split('\n');
partialChunk = lines.pop() ?? '';

for (const line of lines) {
if (!line.trim()) continue;

try {
messageHandler(JSON.parse(line));
} catch (error) {
console.warn('Skipping malformed stream line:', line);
}
}
}
};
return (
<chatContext.Provider
value={{
messages,
chatHistory,
files,
fileIds,
focusMode,
chatId,
hasError,
isMessagesLoaded,
isReady,
loading,
messageAppeared,
notFound,
optimizationMode,
setFileIds,
setFiles,
setFocusMode,
setOptimizationMode,
rewrite,
sendMessage,
}}
>
{children}
</chatContext.Provider>
);
};
export const useChat = () => {
const ctx = useContext(chatContext);
return ctx;
};
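
// Example (illustrative, not part of this file): any descendant of
// ChatProvider can read chat state through the hook, e.g.
//
//   const { messages, loading, sendMessage } = useChat();
//   if (!loading) sendMessage('Hello!');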

View File

@@ -38,7 +38,7 @@ export const loadAimlApiChatModels = async () => {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
- openAIApiKey: apiKey,
+ apiKey: apiKey,
modelName: model.id,
temperature: 0.7,
configuration: {
@@ -76,7 +76,7 @@ export const loadAimlApiEmbeddingModels = async () => {
embeddingModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
- openAIApiKey: apiKey,
+ apiKey: apiKey,
modelName: model.id,
configuration: {
baseURL: API_URL,

View File

@@ -9,6 +9,18 @@ export const PROVIDER_INFO = {
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const anthropicChatModels: Record<string, string>[] = [
+ {
+   displayName: 'Claude 4.1 Opus',
+   key: 'claude-opus-4-1-20250805',
+ },
+ {
+   displayName: 'Claude 4 Opus',
+   key: 'claude-opus-4-20250514',
+ },
+ {
+   displayName: 'Claude 4 Sonnet',
+   key: 'claude-sonnet-4-20250514',
+ },
{
displayName: 'Claude 3.7 Sonnet',
key: 'claude-3-7-sonnet-20250219',

View File

@@ -31,7 +31,7 @@ export const loadDeepseekChatModels = async () => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
- openAIApiKey: deepseekApiKey,
+ apiKey: deepseekApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {

View File

@@ -14,16 +14,16 @@ import { Embeddings } from '@langchain/core/embeddings';
const geminiChatModels: Record<string, string>[] = [
{
- displayName: 'Gemini 2.5 Flash Preview 05-20',
- key: 'gemini-2.5-flash-preview-05-20',
+ displayName: 'Gemini 2.5 Flash',
+ key: 'gemini-2.5-flash',
},
{
- displayName: 'Gemini 2.5 Pro Preview',
- key: 'gemini-2.5-pro-preview-05-06',
+ displayName: 'Gemini 2.5 Flash-Lite',
+ key: 'gemini-2.5-flash-lite',
},
{
- displayName: 'Gemini 2.5 Pro Experimental',
- key: 'gemini-2.5-pro-preview-05-06',
+ displayName: 'Gemini 2.5 Pro',
+ key: 'gemini-2.5-pro',
},
{
displayName: 'Gemini 2.0 Flash',
@@ -75,7 +75,7 @@ export const loadGeminiChatModels = async () => {
displayName: model.displayName,
model: new ChatGoogleGenerativeAI({
apiKey: geminiApiKey,
- modelName: model.key,
+ model: model.key,
temperature: 0.7,
}) as unknown as BaseChatModel,
};
@@ -108,7 +108,7 @@ export const loadGeminiEmbeddingModels = async () => {
return embeddingModels;
} catch (err) {
- console.error(`Error loading OpenAI embeddings models: ${err}`);
+ console.error(`Error loading Gemini embeddings models: ${err}`);
return {};
}
};

View File

@@ -1,4 +1,4 @@
- import { ChatOpenAI } from '@langchain/openai';
+ import { ChatGroq } from '@langchain/groq';
import { getGroqApiKey } from '../config';
import { ChatModel } from '.';
@@ -28,13 +28,10 @@ export const loadGroqChatModels = async () => {
groqChatModels.forEach((model: any) => {
chatModels[model.id] = {
displayName: model.id,
- model: new ChatOpenAI({
-   openAIApiKey: groqApiKey,
-   modelName: model.id,
+ model: new ChatGroq({
+   apiKey: groqApiKey,
+   model: model.id,
temperature: 0.7,
- configuration: {
-   baseURL: 'https://api.groq.com/openai/v1',
- },
}) as unknown as BaseChatModel,
};
});

View File

@@ -118,7 +118,7 @@ export const getAvailableChatModelProviders = async () => {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
- openAIApiKey: customOpenAiApiKey,
+ apiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
temperature: 0.7,
configuration: {

View File

@@ -47,7 +47,7 @@ export const loadLMStudioChatModels = async () => {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
- openAIApiKey: 'lm-studio',
+ apiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},
@@ -83,7 +83,7 @@ export const loadLMStudioEmbeddingsModels = async () => {
embeddingsModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
openAIApiKey: 'lm-studio',
apiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},

View File

@@ -1,16 +1,17 @@
import axios from 'axios';
- import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+ import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'ollama',
displayName: 'Ollama',
};
- import { ChatOllama } from '@langchain/community/chat_models/ollama';
- import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
+ import { ChatOllama } from '@langchain/ollama';
+ import { OllamaEmbeddings } from '@langchain/ollama';
export const loadOllamaChatModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
model: model.model,
temperature: 0.7,
keepAlive: getKeepAlive(),
+ ...(ollamaApiKey
+   ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+   : {}),
}),
};
});
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
export const loadOllamaEmbeddingModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
+ const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
model: new OllamaEmbeddings({
baseUrl: ollamaApiEndpoint,
model: model.model,
+ ...(ollamaApiKey
+   ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+   : {}),
}),
};
});
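
The conditional spread attaches an Authorization header only when an Ollama API key is actually configured, so unauthenticated local instances keep working unchanged. A minimal standalone sketch of the same pattern (the endpoint and model name below are illustrative placeholders, not values from the repo):

import { ChatOllama } from '@langchain/ollama';

const ollamaApiKey = process.env.OLLAMA_API_KEY; // may be undefined locally

const chat = new ChatOllama({
  baseUrl: 'http://localhost:11434',
  model: 'llama3.1',
  // Spread in headers only when a key exists; otherwise contribute nothing.
  ...(ollamaApiKey
    ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
    : {}),
});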

View File

@@ -42,6 +42,18 @@ const openaiChatModels: Record<string, string>[] = [
displayName: 'GPT 4.1',
key: 'gpt-4.1',
},
+ {
+   displayName: 'GPT 5 nano',
+   key: 'gpt-5-nano',
+ },
+ {
+   displayName: 'GPT 5 mini',
+   key: 'gpt-5-mini',
+ },
+ {
+   displayName: 'GPT 5',
+   key: 'gpt-5',
+ },
];
const openaiEmbeddingModels: Record<string, string>[] = [
@@ -67,9 +79,9 @@ export const loadOpenAIChatModels = async () => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
- openAIApiKey: openaiApiKey,
+ apiKey: openaiApiKey,
modelName: model.key,
- temperature: 0.7,
+ temperature: model.key.includes('gpt-5') ? 1 : 0.7,
}) as unknown as BaseChatModel,
};
});
@@ -93,7 +105,7 @@ export const loadOpenAIEmbeddingModels = async () => {
embeddingModels[model.key] = {
displayName: model.displayName,
model: new OpenAIEmbeddings({
- openAIApiKey: openaiApiKey,
+ apiKey: openaiApiKey,
modelName: model.key,
}) as unknown as Embeddings,
};
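
The temperature ternary reflects that the GPT-5 family accepts only the default sampling temperature of 1 (requests setting any other value are rejected by the API), while all other models keep the 0.7 default.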

View File

@@ -1,8 +1,11 @@
- import { BaseMessage } from '@langchain/core/messages';
+ import { BaseMessage, isAIMessage } from '@langchain/core/messages';
const formatChatHistoryAsString = (history: BaseMessage[]) => {
return history
- .map((message) => `${message._getType()}: ${message.content}`)
+ .map(
+   (message) =>
+     `${isAIMessage(message) ? 'AI' : 'User'}: ${message.content}`,
+ )
.join('\n');
};
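
The rewritten map keys the label off isAIMessage rather than LangChain's internal _getType() strings ('ai'/'human'), so every non-AI message is labelled 'User'. A quick illustrative sketch of the resulting output (the sample messages are invented for the example):

import { AIMessage, HumanMessage } from '@langchain/core/messages';

const history = [
  new HumanMessage('What changed in this release?'),
  new AIMessage('The Groq provider now uses @langchain/groq.'),
];

// formatChatHistoryAsString(history) now yields:
// User: What changed in this release?
// AI: The Groq provider now uses @langchain/groq.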

yarn.lock: 701 lines changed (diff suppressed because it is too large)