Compare commits

..

8 Commits

Author SHA1 Message Date
ItzCrazyKns
83f1c6ce12 Merge pull request #736 from ItzCrazyKns/master
Merge master into feat/deep-research
2025-04-08 12:28:46 +05:30
ItzCrazyKns
fd6c58734d feat(metaSearchAgent): add quality optimization mode 2025-04-08 12:27:48 +05:30
ItzCrazyKns
114a7aa09d Merge pull request #728 from ItzCrazyKns/master-deep-research
Merge master into feat/deep-research
2025-04-07 10:21:34 +05:30
ItzCrazyKns
d0ba8c9038 Merge branch 'feat/deep-research' into master-deep-research 2025-04-07 10:21:22 +05:30
ItzCrazyKns
934fb0a23b Update metaSearchAgent.ts 2025-04-07 10:18:11 +05:30
ItzCrazyKns
8ecf3b4e99 feat(chat-window): update message handling 2025-04-02 13:02:45 +05:30
ItzCrazyKns
b5ee8386e7 Merge pull request #714 from ItzCrazyKns/master
Merge master into feat/deep-research
2025-04-01 14:16:45 +05:30
ItzCrazyKns
0fcd598ff7 feat(metaSearchAgent): eliminate runnables 2025-03-24 17:27:54 +05:30
28 changed files with 536 additions and 2904 deletions

View File

@@ -25,8 +25,5 @@ API_URL = "" # Ollama API URL - http://host.docker.internal:11434
[MODELS.DEEPSEEK] [MODELS.DEEPSEEK]
API_KEY = "" API_KEY = ""
[MODELS.LM_STUDIO]
API_URL = "" # LM Studio API URL - http://host.docker.internal:1234
[API_ENDPOINTS] [API_ENDPOINTS]
SEARXNG = "" # SearxNG API URL - http://localhost:32768 SEARXNG = "" # SearxNG API URL - http://localhost:32768

View File

@@ -1,124 +0,0 @@
/**
* Default categories and functions for generating search queries
*/
import { LANGUAGE_SPECIFIC_SOURCES } from './languages';
/**
* Default English categories and their sources
*/
// Each category maps to a list of { site, keyword } pairs. Consumers build
// site-restricted queries (e.g. `site:<site> <keyword>`) from these pairs;
// see searchCategory / getDefaultResults.
export const DEFAULT_CATEGORIES: Record<string, { site: string; keyword: string }[]> = {
  'Technology': [
    { site: 'techcrunch.com', keyword: 'tech' },
    { site: 'wired.com', keyword: 'technology' },
    { site: 'theverge.com', keyword: 'tech' },
    { site: 'arstechnica.com', keyword: 'technology' },
    { site: 'thenextweb.com', keyword: 'tech' }
  ],
  'AI': [
    { site: 'ai.googleblog.com', keyword: 'AI' },
    { site: 'openai.com/blog', keyword: 'AI' },
    { site: 'venturebeat.com', keyword: 'artificial intelligence' },
    { site: 'techcrunch.com', keyword: 'artificial intelligence' },
    // NOTE(review): MIT Technology Review is usually technologyreview.com —
    // confirm this domain resolves as intended.
    { site: 'technologyreview.mit.edu', keyword: 'AI' }
  ],
  'Sports': [
    { site: 'espn.com', keyword: 'sports' },
    { site: 'sports.yahoo.com', keyword: 'sports' },
    { site: 'cbssports.com', keyword: 'sports' },
    { site: 'si.com', keyword: 'sports' },
    { site: 'bleacherreport.com', keyword: 'sports' }
  ],
  'Money': [
    { site: 'bloomberg.com', keyword: 'finance' },
    { site: 'cnbc.com', keyword: 'money' },
    { site: 'wsj.com', keyword: 'finance' },
    { site: 'ft.com', keyword: 'finance' },
    { site: 'economist.com', keyword: 'economy' }
  ],
  'Gaming': [
    { site: 'ign.com', keyword: 'games' },
    { site: 'gamespot.com', keyword: 'gaming' },
    { site: 'polygon.com', keyword: 'games' },
    { site: 'kotaku.com', keyword: 'gaming' },
    { site: 'eurogamer.net', keyword: 'games' }
  ],
  'Entertainment': [
    { site: 'variety.com', keyword: 'entertainment' },
    { site: 'hollywoodreporter.com', keyword: 'entertainment' },
    { site: 'ew.com', keyword: 'entertainment' },
    { site: 'deadline.com', keyword: 'entertainment' },
    { site: 'rollingstone.com', keyword: 'entertainment' }
  ],
  'Art and Culture': [
    { site: 'artnews.com', keyword: 'art' },
    { site: 'artsy.net', keyword: 'art' },
    { site: 'theartnewspaper.com', keyword: 'art' },
    { site: 'nytimes.com/section/arts', keyword: 'culture' },
    { site: 'culturalweekly.com', keyword: 'culture' }
  ],
  'Science': [
    { site: 'scientificamerican.com', keyword: 'science' },
    { site: 'nature.com', keyword: 'science' },
    { site: 'science.org', keyword: 'science' },
    { site: 'newscientist.com', keyword: 'science' },
    { site: 'popsci.com', keyword: 'science' }
  ],
  'Health': [
    { site: 'webmd.com', keyword: 'health' },
    { site: 'health.harvard.edu', keyword: 'health' },
    { site: 'mayoclinic.org', keyword: 'health' },
    { site: 'nih.gov', keyword: 'health' },
    { site: 'medicalnewstoday.com', keyword: 'health' }
  ],
  'Travel': [
    { site: 'travelandleisure.com', keyword: 'travel' },
    { site: 'lonelyplanet.com', keyword: 'travel' },
    { site: 'tripadvisor.com', keyword: 'travel' },
    { site: 'nationalgeographic.com', keyword: 'travel' },
    { site: 'cntraveler.com', keyword: 'travel' }
  ],
  'Current News': [
    { site: 'reuters.com', keyword: 'news' },
    { site: 'apnews.com', keyword: 'news' },
    { site: 'bbc.com', keyword: 'news' },
    { site: 'npr.org', keyword: 'news' },
    { site: 'aljazeera.com', keyword: 'news' }
  ]
};
/**
* Helper function to get search queries for a category
* Prioritizes language-specific sources if available
*/
/**
 * Resolve the { site, keyword } query pairs to use for a category.
 *
 * Resolution order:
 *  1. exact language-specific sources for (language, category);
 *  2. for Chinese variants (zh-*, *Hans*, *Hant*), the generic 'zh' sources;
 *  3. the default English sources (unknown categories fall back to
 *     'Technology').
 */
export function getSearchQueriesForCategory(category: string, language?: string): { site: string; keyword: string }[] {
  if (language) {
    // Exact language match wins.
    const exact = LANGUAGE_SPECIFIC_SOURCES[language]?.[category];
    if (exact) {
      return exact;
    }

    // Chinese variants fall back to the general 'zh' source list.
    const isChineseVariant =
      language.startsWith('zh') ||
      language.includes('Hans') ||
      language.includes('Hant');
    if (isChineseVariant) {
      const zhSources = LANGUAGE_SPECIFIC_SOURCES['zh']?.[category];
      if (zhSources) {
        return zhSources;
      }
    }
  }

  // No language-specific sources: use the default English ones.
  return DEFAULT_CATEGORIES[category] || DEFAULT_CATEGORIES['Technology'];
}
/**
* Default high-quality sources for the default view
*/
// Used when no category or user preference is given; skews toward
// tech/AI outlets. Same { site, keyword } shape as DEFAULT_CATEGORIES.
export const DEFAULT_SOURCES = [
  { site: 'techcrunch.com', keyword: 'tech' },
  { site: 'wired.com', keyword: 'technology' },
  { site: 'theverge.com', keyword: 'tech' },
  { site: 'venturebeat.com', keyword: 'artificial intelligence' },
  // NOTE(review): MIT Technology Review is usually technologyreview.com — confirm.
  { site: 'technologyreview.mit.edu', keyword: 'AI' },
  { site: 'ai.googleblog.com', keyword: 'AI' }
];

File diff suppressed because it is too large Load Diff

View File

@@ -1,128 +0,0 @@
import db from "@/lib/db";
import { userPreferences } from "@/lib/db/schema";
import { eq } from "drizzle-orm";
// GET handler to retrieve user preferences for the discover feed.
// Responds with { categories: string[], languages: string[] }; returns
// built-in defaults when the user has no stored row.
export const GET = async (req: Request) => {
  try {
    console.log('[Preferences] Retrieving user preferences');

    // In a production app, you would get user ID from an auth session
    const url = new URL(req.url);
    const userId = url.searchParams.get('userId') || "default-user";
    console.log(`[Preferences] Fetching preferences for user: ${userId}`);

    const userPrefs = await db.select().from(userPreferences).where(eq(userPreferences.userId, userId));

    if (userPrefs.length === 0) {
      console.log('[Preferences] No preferences found, returning defaults');
      // Return default preferences if none exist
      return Response.json({
        categories: ['AI', 'Technology'],
        languages: ['en'] // Default to English
      });
    }

    // Handle backward compatibility for old schema versions: older rows
    // stored a single `language` value rather than a `languages` array.
    let languages: string[] = [];
    if ('languages' in userPrefs[0] && userPrefs[0].languages) {
      languages = userPrefs[0].languages;
    } else if ('language' in userPrefs[0] && userPrefs[0].language) {
      // Convert old single language to array for backward compatibility
      languages = Array.isArray(userPrefs[0].language)
        ? userPrefs[0].language
        : [userPrefs[0].language];
    } else {
      languages = ['en']; // Default to English if no language preference found
    }

    console.log(`[Preferences] Found user preferences: categories=${JSON.stringify(userPrefs[0].categories)}, languages=${JSON.stringify(languages)}`);
    return Response.json({
      categories: userPrefs[0].categories,
      languages: languages
    });
  } catch (err: unknown) {
    // `unknown` (not `any`) forces the narrowing we already do below.
    console.error(`[Preferences] Error getting user preferences: ${err instanceof Error ? err.message : String(err)}`);
    console.error(`[Preferences] Error stack: ${err instanceof Error ? err.stack : 'No stack trace available'}`);
    return Response.json(
      { message: 'An error has occurred' },
      { status: 500 }
    );
  }
};
// POST handler to save user preferences for the discover feed.
// Expects a JSON body { categories: string[], languages?: string[] };
// validates shapes, then inserts or updates the row for the user.
export const POST = async (req: Request) => {
  try {
    console.log('[Preferences] Updating user preferences');

    // In a production app, you would get user ID from an auth session
    const url = new URL(req.url);
    const userId = url.searchParams.get('userId') || "default-user";

    const body = await req.json();
    const { categories, languages } = body;
    console.log(`[Preferences] Received update: userId=${userId}, categories=${JSON.stringify(categories)}, languages=${JSON.stringify(languages)}`);

    // Validate payload shapes before touching the database.
    if (!categories || !Array.isArray(categories)) {
      console.error('[Preferences] Invalid categories format');
      return Response.json(
        { message: 'Invalid categories format' },
        { status: 400 }
      );
    }

    if (languages && !Array.isArray(languages)) {
      console.error('[Preferences] Invalid languages format');
      return Response.json(
        { message: 'Invalid languages format' },
        { status: 400 }
      );
    }

    const userPrefs = await db.select().from(userPreferences).where(eq(userPreferences.userId, userId));

    try {
      if (userPrefs.length === 0) {
        // Create new preferences
        console.log(`[Preferences] Creating new preferences for user: ${userId}`);
        await db.insert(userPreferences).values({
          userId,
          categories,
          languages: languages || ['en'],
          createdAt: new Date().toISOString(),
          updatedAt: new Date().toISOString(),
        });
      } else {
        // Update existing preferences
        console.log(`[Preferences] Updating existing preferences for user: ${userId}`);
        await db.update(userPreferences)
          .set({
            categories,
            languages: languages || ['en'],
            updatedAt: new Date().toISOString()
          })
          .where(eq(userPreferences.userId, userId));
      }
      console.log(`[Preferences] Successfully updated preferences for user: ${userId}`);
    } catch (error: unknown) {
      // If there's an error (likely due to schema mismatch), log it but don't fail
      console.warn(`[Preferences] Error updating preferences with new schema: ${error instanceof Error ? error.message : String(error)}`);
      console.warn('[Preferences] Continuing with request despite error');
      // We'll just return success anyway since we can't fix the schema issue here
    }

    return Response.json({ message: 'Preferences updated successfully' });
  } catch (err: unknown) {
    // `unknown` (not `any`) forces the narrowing we already do below.
    console.error(`[Preferences] Error updating user preferences: ${err instanceof Error ? err.message : String(err)}`);
    console.error(`[Preferences] Error stack: ${err instanceof Error ? err.stack : 'No stack trace available'}`);
    return Response.json(
      { message: 'An error has occurred' },
      { status: 500 }
    );
  }
};

View File

@ -1,83 +1,54 @@
import { getSearchQueriesForCategory, DEFAULT_SOURCES } from './categories'; import { searchSearxng } from '@/lib/searxng';
import { searchCategory, getDefaultResults, processResults } from './search';
const articleWebsites = [
'yahoo.com',
'www.exchangewire.com',
'businessinsider.com',
/* 'wired.com',
'mashable.com',
'theverge.com',
'gizmodo.com',
'cnet.com',
'venturebeat.com', */
];
const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */
export const GET = async (req: Request) => { export const GET = async (req: Request) => {
try { try {
const url = new URL(req.url); const data = (
const category = url.searchParams.get('category'); await Promise.all([
const preferencesParam = url.searchParams.get('preferences'); ...new Array(articleWebsites.length * topics.length)
const languagesParam = url.searchParams.get('languages'); .fill(0)
.map(async (_, i) => {
console.log(`[Discover] Request received: category=${category}, preferences=${preferencesParam}, languages=${languagesParam}`); return (
await searchSearxng(
let data: any[] = []; `site:${articleWebsites[i % articleWebsites.length]} ${
let languages: string[] = []; topics[i % topics.length]
}`,
// Parse languages parameter {
if (languagesParam) { engines: ['bing news'],
try { pageno: 1,
const parsedLanguages = JSON.parse(languagesParam); },
if (Array.isArray(parsedLanguages)) { )
languages = parsedLanguages; ).results;
} }),
} catch (err) { ])
console.error(`[Discover] Error parsing languages: ${err instanceof Error ? err.message : String(err)}`); )
} .map((result) => result)
} .flat()
.sort(() => Math.random() - 0.5);
console.log(`[Discover] Using languages: ${JSON.stringify(languages)}`);
// Handle category-specific searches
if (category && category !== 'For You') {
console.log(`[Discover] Searching for category: ${category}`);
data = await searchCategory(category, languages, getSearchQueriesForCategory);
}
// Handle preference-based searches
else if (preferencesParam) {
try {
const preferences = JSON.parse(preferencesParam);
if (Array.isArray(preferences) && preferences.length > 0) {
console.log(`[Discover] Searching for preferences: ${JSON.stringify(preferences)}`);
// Get content for each preferred category
const categoryPromises = preferences.map((pref: string) =>
searchCategory(pref, languages, getSearchQueriesForCategory)
);
const results = await Promise.all(categoryPromises);
data = results.flat();
} else {
console.log(`[Discover] No valid preferences found, using default search`);
// Fallback to default behavior
data = await getDefaultResults(languages, DEFAULT_SOURCES);
}
} catch (err) {
console.error(`[Discover] Error with preferences: ${err instanceof Error ? err.message : String(err)}`);
data = await getDefaultResults(languages, DEFAULT_SOURCES);
}
}
// Default search behavior
else {
console.log(`[Discover] Using default search`);
data = await getDefaultResults(languages, DEFAULT_SOURCES);
}
console.log(`[Discover] Found ${data.length} results before filtering`);
// Process and filter results for display
const finalData = processResults(data);
console.log(`[Discover] Found ${finalData.length} results after filtering`);
return Response.json( return Response.json(
{ {
blogs: finalData, blogs: data,
}, },
{ {
status: 200, status: 200,
}, },
); );
} catch (err) { } catch (err) {
console.error(`[Discover] An error occurred in discover route: ${err instanceof Error ? err.message : String(err)}`); console.error(`An error occurred in discover route: ${err}`);
console.error(`[Discover] Error stack: ${err instanceof Error ? err.stack : 'No stack trace available'}`);
return Response.json( return Response.json(
{ {
message: 'An error has occurred', message: 'An error has occurred',

View File

@@ -1,173 +0,0 @@
import { searchSearxng } from '@/lib/searxng';
import { LANGUAGE_SPECIFIC_ENGINES } from './languages';
// Define the search options interface to match the one in lib/searxng.ts
interface SearxngSearchOptions {
  categories?: string[];
  engines?: string[];
  language?: string;
  pageno?: number;
}
/**
 * Default search engines to use, in priority order — earlier entries are
 * tried first by searchWithMultipleEngines before falling back.
 */
export const DEFAULT_ENGINES = ['bing news', 'brave news', 'duckduckgo news'];
/**
 * Search with multiple engines as fallbacks.
 * Tries each engine in sequence until enough results are accumulated
 * (>= 20) or the engine list is exhausted. Per-engine failures are
 * logged and skipped so one broken engine never aborts the search.
 *
 * @param query    Full query string (may include `site:` operators).
 * @param language Language code to pass through, '' for no filter.
 * @param engines  Engines to try, in priority order.
 */
export async function searchWithMultipleEngines(
  query: string,
  language: string,
  engines: string[] = DEFAULT_ENGINES
): Promise<any[]> {
  // Never reassigned — only pushed into. (The old `hasResults` flag was
  // written but never read, so it has been removed.)
  const allResults: any[] = [];

  // Try each engine in sequence until we get results or run out of engines
  for (const engine of engines) {
    try {
      console.log(`[Discover] Trying engine "${engine}" for query "${query}" in language "${language || 'default'}"`);

      const searchOptions: SearxngSearchOptions = {
        engines: [engine],
        pageno: 1,
      };
      if (language) {
        searchOptions.language = language;
      }

      const result = await searchSearxng(query, searchOptions);

      if (result.results && result.results.length > 0) {
        console.log(`[Discover] Found ${result.results.length} results from engine "${engine}"`);
        allResults.push(...result.results);

        // If we've found enough results, stop trying more engines
        if (allResults.length >= 20) {
          break;
        }
      } else {
        console.log(`[Discover] No results from engine "${engine}", trying next engine if available`);
      }
    } catch (err) {
      console.error(`[Discover] Error searching with engine "${engine}": ${err instanceof Error ? err.message : String(err)}`);
    }
  }

  return allResults;
}
/**
 * Search for a specific category across multiple languages and engines.
 *
 * @param category   Category name passed to `getQueries`.
 * @param languages  Language codes; empty means one unfiltered pass.
 * @param getQueries Resolver returning { site, keyword } pairs for a
 *                   category (optionally per language).
 */
export async function searchCategory(
  category: string,
  languages: string[] = [],
  getQueries: (cat: string, lang?: string) => { site: string; keyword: string }[]
): Promise<any[]> {
  console.log(`[Discover] Searching category "${category}" in languages: ${JSON.stringify(languages)}`);

  // If no languages specified or empty array, search in English
  if (!languages || languages.length === 0) {
    const queries = getQueries(category);
    const searchPromises = queries.map(query =>
      searchWithMultipleEngines(`site:${query.site} ${query.keyword}`, '')
    );
    const results = await Promise.all(searchPromises);
    return results.flat();
  }

  // If languages specified, search each language and combine results.
  // Typed explicitly (consistent with getDefaultResults) rather than
  // relying on TS "evolving array" inference from a bare `[]`.
  const allResults: any[] = [];
  for (const language of languages) {
    console.log(`[Discover] Searching in language: ${language}`);

    // Get language-specific engines if available, otherwise use defaults
    const engines = LANGUAGE_SPECIFIC_ENGINES[language] || DEFAULT_ENGINES;

    // Get language-specific queries
    const queries = getQueries(category, language);

    const searchPromises = queries.map(query => {
      // For Chinese languages, don't use the site: operator
      const isChinese = language.startsWith('zh');
      const queryString = isChinese ? query.keyword : `site:${query.site} ${query.keyword}`;
      return searchWithMultipleEngines(queryString, language, engines);
    });

    const results = await Promise.all(searchPromises);
    allResults.push(...results.flat());
  }

  return allResults;
}
/**
 * Default search behavior with multi-language support.
 * With no languages, every default source is searched once with no
 * language filter; otherwise one pass runs per preferred language
 * (sequentially) and all results are merged.
 */
export async function getDefaultResults(
  languages: string[] = [],
  defaultSources: { site: string; keyword: string }[]
): Promise<any[]> {
  console.log(`[Discover] Getting default results for languages: ${JSON.stringify(languages)}`);

  // No language preference: a single unfiltered pass over all sources.
  if (languages.length === 0) {
    const unfiltered = await Promise.all(
      defaultSources.map((source) =>
        searchWithMultipleEngines(`site:${source.site} ${source.keyword}`, '')
      )
    );
    return unfiltered.flat();
  }

  // One pass per preferred language, merging everything found.
  const combined: any[] = [];
  for (const language of languages) {
    console.log(`[Discover] Default search in language: ${language}`);

    // Language-specific engines when configured, defaults otherwise.
    const engines = LANGUAGE_SPECIFIC_ENGINES[language] || DEFAULT_ENGINES;

    // Chinese searches skip the site: operator.
    const isChinese = language.startsWith('zh');

    const perSource = await Promise.all(
      defaultSources.map((source) => {
        const queryString = isChinese
          ? source.keyword
          : `site:${source.site} ${source.keyword}`;
        return searchWithMultipleEngines(queryString, language, engines);
      })
    );
    combined.push(...perSource.flat());
  }

  return combined;
}
/**
 * Process raw search results for display.
 * Prefers items that have a thumbnail; if none do, falls back to up to
 * 10 thumbnail-less items so the page is never empty. The returned list
 * is a new array in random order (the input is not mutated).
 */
export function processResults(results: any[]): any[] {
  // Filter out items without thumbnails
  const resultsWithThumbnails = results.filter((item) => item.thumbnail);

  // If there are no results with thumbnails but we have results without
  // thumbnails, use some of those rather than showing nothing.
  // (filter/slice both return fresh arrays, so shuffling in place below
  // never touches the caller's array.)
  const finalResults =
    resultsWithThumbnails.length === 0 && results.length > 0
      ? results.slice(0, 10) // Limit to 10 results without thumbnails
      : resultsWithThumbnails;

  // Fisher-Yates shuffle. The previous sort(() => Math.random() - 0.5)
  // is a biased shuffle and hands sort() an inconsistent comparator,
  // which Array.prototype.sort does not guarantee anything sensible for.
  for (let i = finalResults.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    [finalResults[i], finalResults[j]] = [finalResults[j], finalResults[i]];
  }

  return finalResults;
}

View File

@ -1,7 +1,7 @@
'use client'; 'use client';
import { Search, Sliders, ChevronLeft, ChevronRight } from 'lucide-react'; import { Search } from 'lucide-react';
import { useEffect, useState, useRef, memo, useMemo } from 'react'; import { useEffect, useState } from 'react';
import Link from 'next/link'; import Link from 'next/link';
import { toast } from 'sonner'; import { toast } from 'sonner';
@ -12,165 +12,14 @@ interface Discover {
thumbnail: string; thumbnail: string;
} }
const categories = [ const Page = () => {
'For You', 'AI', 'Technology', 'Current News', 'Sports',
'Money', 'Gaming', 'Entertainment', 'Art and Culture',
'Science', 'Health', 'Travel'
];
// Header component with categories: title row, a "Personalize" button that
// opens the preferences modal, and a horizontally scrollable category strip.
// Memoized so it only re-renders when its props change.
const DiscoverHeader = memo(({
  activeCategory,
  setActiveCategory,
  setShowPreferences,
  userPreferences
}: {
  activeCategory: string;
  setActiveCategory: (category: string) => void;
  setShowPreferences: (show: boolean) => void;
  userPreferences: string[];
}) => {
  // Ref to the scrollable category strip, used by scrollCategories below.
  const categoryContainerRef = useRef<HTMLDivElement>(null);

  // Filter categories to show only what the user has selected in preferences
  // Always include "For You" and the currently active category if it's not in preferences
  const visibleCategories = useMemo(() => {
    // Always start with "For You"
    const filtered = ['For You'];

    // Add user's preferred categories
    userPreferences.forEach(category => {
      if (!filtered.includes(category)) {
        filtered.push(category);
      }
    });

    // Add active category if it's not already included
    if (activeCategory && !filtered.includes(activeCategory)) {
      filtered.push(activeCategory);
    }

    // If user has no preferences, show a limited default set
    if (filtered.length <= 1) {
      return ['For You', 'AI', 'Technology', 'Current News'];
    }

    return filtered;
  }, [userPreferences, activeCategory]);

  // Smooth-scroll the strip left/right by 80% of its visible width.
  const scrollCategories = (direction: 'left' | 'right') => {
    const container = categoryContainerRef.current;
    if (!container) return;

    const scrollAmount = container.clientWidth * 0.8;
    const currentScroll = container.scrollLeft;

    container.scrollTo({
      left: direction === 'left'
        ? Math.max(0, currentScroll - scrollAmount)
        : currentScroll + scrollAmount,
      behavior: 'smooth'
    });
  };

  return (
    <div className="flex flex-col pt-4">
      <div className="flex items-center justify-between">
        <div className="flex items-center">
          <Search />
          <h1 className="text-3xl font-medium p-2">Discover</h1>
        </div>
        <button
          className="p-2 rounded-full bg-light-secondary dark:bg-dark-secondary hover:bg-light-primary hover:dark:bg-dark-primary transition-colors"
          onClick={() => setShowPreferences(true)}
          aria-label="Personalize"
        >
          <Sliders size={20} />
        </button>
      </div>

      <div className="relative flex items-center py-4">
        <button
          className="absolute left-0 z-10 p-1 rounded-full bg-light-secondary dark:bg-dark-secondary hover:bg-light-primary/80 hover:dark:bg-dark-primary/80 transition-colors"
          onClick={() => scrollCategories('left')}
          aria-label="Scroll left"
        >
          <ChevronLeft size={20} />
        </button>

        <div
          className="flex overflow-x-auto mx-8 no-scrollbar scroll-smooth"
          ref={categoryContainerRef}
          style={{ scrollbarWidth: 'none' }} // For Firefox
        >
          <div className="flex space-x-2">
            {visibleCategories.map((category) => (
              <button
                key={category}
                className={`px-4 py-2 rounded-full whitespace-nowrap transition-colors ${
                  activeCategory === category
                    ? 'bg-light-primary dark:bg-dark-primary text-white'
                    : 'bg-light-secondary dark:bg-dark-secondary hover:bg-light-primary/80 hover:dark:bg-dark-primary/80'
                }`}
                onClick={() => setActiveCategory(category)}
              >
                {category}
              </button>
            ))}
          </div>
        </div>

        <button
          className="absolute right-0 z-10 p-1 rounded-full bg-light-secondary dark:bg-dark-secondary hover:bg-light-primary/80 hover:dark:bg-dark-primary/80 transition-colors"
          onClick={() => scrollCategories('right')}
          aria-label="Scroll right"
        >
          <ChevronRight size={20} />
        </button>
      </div>

      <hr className="border-t border-[#2B2C2C] my-4 w-full" />
    </div>
  );
});
DiscoverHeader.displayName = 'DiscoverHeader';
// Content component that displays articles
const DiscoverContent = memo(({
activeCategory,
userPreferences,
preferredLanguages
}: {
activeCategory: string;
userPreferences: string[];
preferredLanguages: string[];
}) => {
const [discover, setDiscover] = useState<Discover[] | null>(null); const [discover, setDiscover] = useState<Discover[] | null>(null);
const [contentLoading, setContentLoading] = useState(true); const [loading, setLoading] = useState(true);
useEffect(() => { useEffect(() => {
const fetchData = async () => { const fetchData = async () => {
setContentLoading(true);
try { try {
let endpoint = `/api/discover`; const res = await fetch(`/api/discover`, {
let params = [];
if (activeCategory !== 'For You') {
params.push(`category=${encodeURIComponent(activeCategory)}`);
} else if (userPreferences.length > 0) {
params.push(`preferences=${encodeURIComponent(JSON.stringify(userPreferences))}`);
}
if (preferredLanguages.length > 0) {
params.push(`languages=${encodeURIComponent(JSON.stringify(preferredLanguages))}`);
}
if (params.length > 0) {
endpoint += `?${params.join('&')}`;
}
const res = await fetch(endpoint, {
method: 'GET', method: 'GET',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -183,7 +32,6 @@ const DiscoverContent = memo(({
throw new Error(data.message); throw new Error(data.message);
} }
// Filter out items without thumbnails (double-checking)
data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail); data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail);
setDiscover(data.blogs); setDiscover(data.blogs);
@ -191,326 +39,74 @@ const DiscoverContent = memo(({
console.error('Error fetching data:', err.message); console.error('Error fetching data:', err.message);
toast.error('Error fetching data'); toast.error('Error fetching data');
} finally { } finally {
setContentLoading(false); setLoading(false);
} }
}; };
fetchData(); fetchData();
}, [activeCategory, userPreferences, preferredLanguages]);
if (contentLoading) {
return (
<div className="flex flex-row items-center justify-center py-20">
<svg
aria-hidden="true"
className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
viewBox="0 0 100 101"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
fill="currentColor"
/>
<path
d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
fill="currentFill"
/>
</svg>
</div>
);
}
if (!discover || discover.length === 0) {
return (
<div className="flex flex-row items-center justify-center min-h-[50vh]">
<p className="text-black/70 dark:text-white/70 text-sm">
No content found for this category.
</p>
</div>
);
}
return (
<div className="grid lg:grid-cols-3 sm:grid-cols-2 grid-cols-1 gap-4 pb-28 lg:pb-8 w-full justify-items-center lg:justify-items-start">
{discover.map((item, i) => (
<Link
href={`/?q=Summary: ${item.url}`}
key={i}
className="max-w-sm rounded-lg overflow-hidden bg-light-secondary dark:bg-dark-secondary hover:-translate-y-[1px] transition duration-200"
target="_blank"
>
{/* Using img tag with URL processing for thumbnails */}
<img
className="object-cover w-full aspect-video"
src={
new URL(item.thumbnail).origin +
new URL(item.thumbnail).pathname +
`?id=${new URL(item.thumbnail).searchParams.get('id')}`
}
alt={item.title}
/>
<div className="px-6 py-4">
<div className="font-bold text-lg mb-2">
{item.title.slice(0, 100)}...
</div>
<p className="text-black-70 dark:text-white/70 text-sm">
{item.content.slice(0, 100)}...
</p>
</div>
</Link>
))}
</div>
);
});
DiscoverContent.displayName = 'DiscoverContent';
// Preferences modal for personalization. Edits are staged in temp state and
// only committed (POSTed to /api/discover/preferences and copied into the
// parent's state) when the user hits Save; Cancel discards them.
const PreferencesModal = memo(({
  showPreferences,
  setShowPreferences,
  userPreferences,
  setUserPreferences,
  preferredLanguages,
  setPreferredLanguages,
  setActiveCategory
}: {
  showPreferences: boolean;
  setShowPreferences: (show: boolean) => void;
  userPreferences: string[];
  setUserPreferences: (prefs: string[]) => void;
  preferredLanguages: string[];
  setPreferredLanguages: (langs: string[]) => void;
  setActiveCategory: (category: string) => void;
}) => {
  // Staged (uncommitted) selections while the modal is open.
  const [tempPreferences, setTempPreferences] = useState<string[]>([]);
  const [tempLanguages, setTempLanguages] = useState<string[]>([]);

  // Re-seed the staged state from the saved preferences each time the
  // modal is opened.
  useEffect(() => {
    if (showPreferences) {
      setTempPreferences([...userPreferences]);
      setTempLanguages([...preferredLanguages]);
    }
  }, [showPreferences, userPreferences, preferredLanguages]);

  // Persist the staged selections via the preferences API; surfaces the
  // outcome to the user with a toast.
  const saveUserPreferences = async (preferences: string[], languages: string[]) => {
    try {
      const res = await fetch(`/api/discover/preferences`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          categories: preferences,
          languages
        }),
      });

      if (res.ok) {
        toast.success('Preferences saved successfully');
      } else {
        const data = await res.json();
        throw new Error(data.message);
      }
    } catch (err: any) {
      console.error('Error saving preferences:', err.message);
      toast.error('Error saving preferences');
    }
  };

  // Render nothing when the modal is closed.
  if (!showPreferences) return null;

  return (
    <div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
      <div className="bg-white dark:bg-[#1E1E1E] p-6 rounded-lg w-full max-w-md">
        <h2 className="text-xl font-bold mb-4">Personalize Your Feed</h2>

        <h3 className="font-medium mb-2">Select categories you&apos;re interested in:</h3>
        <div className="grid grid-cols-2 gap-2 mb-6">
          {/* Toggle buttons for every category except the synthetic "For You" */}
          {categories.filter(c => c !== 'For You').map((category) => (
            <button
              key={category}
              onClick={() => {
                if (tempPreferences.includes(category)) {
                  setTempPreferences(tempPreferences.filter(p => p !== category));
                } else {
                  setTempPreferences([...tempPreferences, category]);
                }
              }}
              className={`px-3 py-2 rounded-md text-left transition-colors border ${
                tempPreferences.includes(category)
                  ? 'bg-blue-500 border-blue-500 text-white'
                  : 'bg-light-secondary dark:bg-dark-secondary border-gray-400 dark:border-gray-600 hover:border-blue-400 dark:hover:border-blue-400'
              }`}
            >
              {category}
            </button>
          ))}
        </div>

        <div className="mb-6">
          <h3 className="font-medium mb-2">Preferred Languages</h3>
          <div className="grid grid-cols-2 gap-2">
            {/* Toggle buttons for the supported language codes */}
            {[
              { code: 'en', name: 'English' },
              { code: 'ar', name: 'Arabic' },
              { code: 'zh', name: 'Chinese' },
              { code: 'fr', name: 'French' },
              { code: 'de', name: 'German' },
              { code: 'hi', name: 'Hindi' },
              { code: 'it', name: 'Italian' },
              { code: 'ja', name: 'Japanese' },
              { code: 'ko', name: 'Korean' },
              { code: 'pt', name: 'Portuguese' },
              { code: 'ru', name: 'Russian' },
              { code: 'es', name: 'Spanish' },
            ].map((language) => (
              <button
                key={language.code}
                onClick={() => {
                  if (tempLanguages.includes(language.code)) {
                    setTempLanguages(tempLanguages.filter(l => l !== language.code));
                  } else {
                    setTempLanguages([...tempLanguages, language.code]);
                  }
                }}
                className={`px-3 py-2 rounded-md text-left transition-colors border ${
                  tempLanguages.includes(language.code)
                    ? 'bg-blue-500 border-blue-500 text-white'
                    : 'bg-light-secondary dark:bg-dark-secondary border-gray-400 dark:border-gray-600 hover:border-blue-400 dark:hover:border-blue-400'
                }`}
              >
                {language.name}
              </button>
            ))}
          </div>
          <p className="text-sm text-gray-500 mt-2">
            {tempLanguages.length === 0
              ? "No languages selected will show results in all languages"
              : `Selected: ${tempLanguages.length} language(s)`}
          </p>
        </div>

        <div className="flex justify-end space-x-2">
          <button
            className="px-4 py-2 rounded bg-gray-300 dark:bg-gray-700 hover:bg-gray-400 dark:hover:bg-gray-600 transition-colors"
            onClick={() => {
              setShowPreferences(false);
              // Reset temp preferences
              setTempPreferences([]);
              setTempLanguages([]);
            }}
          >
            Cancel
          </button>
          <button
            className="px-4 py-2 rounded bg-light-primary dark:bg-dark-primary text-white hover:bg-light-primary/80 hover:dark:bg-dark-primary/80 transition-colors"
            onClick={async () => {
              await saveUserPreferences(tempPreferences, tempLanguages);
              // Update the actual preferences after saving
              setUserPreferences(tempPreferences);
              setPreferredLanguages(tempLanguages);
              setShowPreferences(false);
              setActiveCategory('For You'); // Switch to For You view to show personalized content
              // Reset temp preferences
              setTempPreferences([]);
              setTempLanguages([]);
            }}
          >
            Save
          </button>
        </div>
      </div>
    </div>
  );
});
PreferencesModal.displayName = 'PreferencesModal';
// Main page component
const Page = () => {
const [activeCategory, setActiveCategory] = useState('For You');
const [showPreferences, setShowPreferences] = useState(false);
const [userPreferences, setUserPreferences] = useState<string[]>(['AI', 'Technology']);
const [preferredLanguages, setPreferredLanguages] = useState<string[]>(['en']);
const [initialLoading, setInitialLoading] = useState(true);
// Load user preferences on initial render
useEffect(() => {
const loadUserPreferences = async () => {
try {
const res = await fetch(`/api/discover/preferences`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (res.ok) {
const data = await res.json();
setUserPreferences(data.categories || ['AI', 'Technology']);
setPreferredLanguages(data.languages || ['en']);
}
} catch (err: any) {
console.error('Error loading preferences:', err.message);
} finally {
setInitialLoading(false);
}
};
loadUserPreferences();
}, []); }, []);
if (initialLoading) { return loading ? (
return ( <div className="flex flex-row items-center justify-center min-h-screen">
<div className="flex flex-row items-center justify-center min-h-screen"> <svg
<svg aria-hidden="true"
aria-hidden="true" className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]" viewBox="0 0 100 101"
viewBox="0 0 100 101" fill="none"
fill="none" xmlns="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg" >
> <path
<path d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z" fill="currentColor"
fill="currentColor" />
/> <path
<path d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z" fill="currentFill"
fill="currentFill" />
/> </svg>
</svg>
</div>
);
}
return (
<div>
<DiscoverHeader
activeCategory={activeCategory}
setActiveCategory={setActiveCategory}
setShowPreferences={setShowPreferences}
userPreferences={userPreferences}
/>
<DiscoverContent
activeCategory={activeCategory}
userPreferences={userPreferences}
preferredLanguages={preferredLanguages}
/>
<PreferencesModal
showPreferences={showPreferences}
setShowPreferences={setShowPreferences}
userPreferences={userPreferences}
setUserPreferences={setUserPreferences}
preferredLanguages={preferredLanguages}
setPreferredLanguages={setPreferredLanguages}
setActiveCategory={setActiveCategory}
/>
</div> </div>
) : (
<>
<div>
<div className="flex flex-col pt-4">
<div className="flex items-center">
<Search />
<h1 className="text-3xl font-medium p-2">Discover</h1>
</div>
<hr className="border-t border-[#2B2C2C] my-4 w-full" />
</div>
<div className="grid lg:grid-cols-3 sm:grid-cols-2 grid-cols-1 gap-4 pb-28 lg:pb-8 w-full justify-items-center lg:justify-items-start">
{discover &&
discover?.map((item, i) => (
<Link
href={`/?q=Summary: ${item.url}`}
key={i}
className="max-w-sm rounded-lg overflow-hidden bg-light-secondary dark:bg-dark-secondary hover:-translate-y-[1px] transition duration-200"
target="_blank"
>
<img
className="object-cover w-full aspect-video"
src={
new URL(item.thumbnail).origin +
new URL(item.thumbnail).pathname +
`?id=${new URL(item.thumbnail).searchParams.get('id')}`
}
alt={item.title}
/>
<div className="px-6 py-4">
<div className="font-bold text-lg mb-2">
{item.title.slice(0, 100)}...
</div>
<p className="text-black-70 dark:text-white/70 text-sm">
{item.content.slice(0, 100)}...
</p>
</div>
</Link>
))}
</div>
</div>
</>
); );
}; };

View File

@ -1,12 +1,10 @@
'use client'; 'use client';
import DeleteChat from '@/components/DeleteChat'; import DeleteChat from '@/components/DeleteChat';
import BatchDeleteChats from '@/components/BatchDeleteChats';
import { cn, formatTimeDifference } from '@/lib/utils'; import { cn, formatTimeDifference } from '@/lib/utils';
import { BookOpenText, Check, ClockIcon, Delete, ScanEye, Search, X } from 'lucide-react'; import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
import Link from 'next/link'; import Link from 'next/link';
import { useEffect, useState } from 'react'; import { useEffect, useState } from 'react';
import { toast } from 'sonner';
export interface Chat { export interface Chat {
id: string; id: string;
@ -17,13 +15,7 @@ export interface Chat {
const Page = () => { const Page = () => {
const [chats, setChats] = useState<Chat[]>([]); const [chats, setChats] = useState<Chat[]>([]);
const [filteredChats, setFilteredChats] = useState<Chat[]>([]);
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [searchQuery, setSearchQuery] = useState('');
const [selectionMode, setSelectionMode] = useState(false);
const [selectedChats, setSelectedChats] = useState<string[]>([]);
const [hoveredChatId, setHoveredChatId] = useState<string | null>(null);
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false);
useEffect(() => { useEffect(() => {
const fetchChats = async () => { const fetchChats = async () => {
@ -39,71 +31,12 @@ const Page = () => {
const data = await res.json(); const data = await res.json();
setChats(data.chats); setChats(data.chats);
setFilteredChats(data.chats);
setLoading(false); setLoading(false);
}; };
fetchChats(); fetchChats();
}, []); }, []);
useEffect(() => {
if (searchQuery.trim() === '') {
setFilteredChats(chats);
} else {
const filtered = chats.filter((chat) =>
chat.title.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredChats(filtered);
}
}, [searchQuery, chats]);
const handleSearchChange = (e: React.ChangeEvent<HTMLInputElement>) => {
setSearchQuery(e.target.value);
};
const clearSearch = () => {
setSearchQuery('');
};
const toggleSelectionMode = () => {
setSelectionMode(!selectionMode);
setSelectedChats([]);
};
const toggleChatSelection = (chatId: string) => {
if (selectedChats.includes(chatId)) {
setSelectedChats(selectedChats.filter(id => id !== chatId));
} else {
setSelectedChats([...selectedChats, chatId]);
}
};
const selectAllChats = () => {
if (selectedChats.length === filteredChats.length) {
setSelectedChats([]);
} else {
setSelectedChats(filteredChats.map(chat => chat.id));
}
};
const deleteSelectedChats = () => {
if (selectedChats.length === 0) return;
setIsDeleteDialogOpen(true);
};
const handleBatchDeleteComplete = () => {
setSelectedChats([]);
setSelectionMode(false);
};
const updateChatsAfterDelete = (newChats: Chat[]) => {
setChats(newChats);
setFilteredChats(newChats.filter(chat =>
searchQuery.trim() === '' ||
chat.title.toLowerCase().includes(searchQuery.toLowerCase())
));
};
return loading ? ( return loading ? (
<div className="flex flex-row items-center justify-center min-h-screen"> <div className="flex flex-row items-center justify-center min-h-screen">
<svg <svg
@ -131,145 +64,32 @@ const Page = () => {
<h1 className="text-3xl font-medium p-2">Library</h1> <h1 className="text-3xl font-medium p-2">Library</h1>
</div> </div>
<hr className="border-t border-[#2B2C2C] my-4 w-full" /> <hr className="border-t border-[#2B2C2C] my-4 w-full" />
{/* Search Box */}
<div className="relative mt-6 mb-6">
<div className="absolute inset-y-0 left-0 flex items-center pl-3 pointer-events-none">
<Search className="w-5 h-5 text-black/50 dark:text-white/50" />
</div>
<input
type="text"
className="block w-full p-2 pl-10 pr-10 bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 rounded-md text-black dark:text-white focus:outline-none focus:ring-1 focus:ring-blue-500"
placeholder="Search your threads..."
value={searchQuery}
onChange={handleSearchChange}
/>
{searchQuery && (
<button
onClick={clearSearch}
className="absolute inset-y-0 right-0 flex items-center pr-3"
>
<X className="w-5 h-5 text-black/50 dark:text-white/50 hover:text-black dark:hover:text-white" />
</button>
)}
</div>
{/* Thread Count and Selection Controls */}
<div className="mb-4">
{!selectionMode ? (
<div className="flex items-center justify-between">
<div className="text-black/70 dark:text-white/70">
You have {chats.length} threads in Perplexica
</div>
<button
onClick={toggleSelectionMode}
className="text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white text-sm transition duration-200"
>
Select
</button>
</div>
) : (
<div className="flex items-center justify-between">
<div className="text-black/70 dark:text-white/70">
{selectedChats.length} selected thread{selectedChats.length !== 1 ? 's' : ''}
</div>
<div className="flex space-x-4">
<button
onClick={selectAllChats}
className="text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white text-sm transition duration-200"
>
{selectedChats.length === filteredChats.length ? 'Deselect all' : 'Select all'}
</button>
<button
onClick={toggleSelectionMode}
className="text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white text-sm transition duration-200"
>
Cancel
</button>
<button
onClick={deleteSelectedChats}
disabled={selectedChats.length === 0}
className={cn(
"text-sm transition duration-200",
selectedChats.length === 0
? "text-red-400/50 hover:text-red-500/50 cursor-not-allowed"
: "text-red-400 hover:text-red-500 cursor-pointer"
)}
>
Delete Selected
</button>
</div>
</div>
)}
</div>
</div> </div>
{chats.length === 0 && (
{filteredChats.length === 0 && ( <div className="flex flex-row items-center justify-center min-h-screen">
<div className="flex flex-row items-center justify-center min-h-[50vh]">
<p className="text-black/70 dark:text-white/70 text-sm"> <p className="text-black/70 dark:text-white/70 text-sm">
{searchQuery ? 'No threads found matching your search.' : 'No threads found.'} No chats found.
</p> </p>
</div> </div>
)} )}
{chats.length > 0 && (
{filteredChats.length > 0 && (
<div className="flex flex-col pb-20 lg:pb-2"> <div className="flex flex-col pb-20 lg:pb-2">
{filteredChats.map((chat, i) => ( {chats.map((chat, i) => (
<div <div
className={cn( className={cn(
'flex flex-col space-y-4 py-6', 'flex flex-col space-y-4 py-6',
i !== filteredChats.length - 1 i !== chats.length - 1
? 'border-b border-white-200 dark:border-dark-200' ? 'border-b border-white-200 dark:border-dark-200'
: '', : '',
)} )}
key={i} key={i}
onMouseEnter={() => setHoveredChatId(chat.id)}
onMouseLeave={() => setHoveredChatId(null)}
> >
<div className="flex items-center"> <Link
{/* Checkbox - visible when in selection mode or when hovering */} href={`/c/${chat.id}`}
{(selectionMode || hoveredChatId === chat.id) && ( className="text-black dark:text-white lg:text-xl font-medium truncate transition duration-200 hover:text-[#24A0ED] dark:hover:text-[#24A0ED] cursor-pointer"
<div >
className="mr-3 cursor-pointer" {chat.title}
onClick={(e) => { </Link>
e.preventDefault();
if (!selectionMode) setSelectionMode(true);
toggleChatSelection(chat.id);
}}
>
<div className={cn(
"w-5 h-5 border rounded flex items-center justify-center transition-colors",
selectedChats.includes(chat.id)
? "bg-blue-500 border-blue-500"
: "border-gray-400 dark:border-gray-600"
)}>
{selectedChats.includes(chat.id) && (
<Check className="w-4 h-4 text-white" />
)}
</div>
</div>
)}
{/* Chat Title */}
<Link
href={`/c/${chat.id}`}
className={cn(
"text-black dark:text-white lg:text-xl font-medium truncate transition duration-200 hover:text-[#24A0ED] dark:hover:text-[#24A0ED] cursor-pointer",
selectionMode && "pointer-events-none text-black dark:text-white hover:text-black dark:hover:text-white"
)}
onClick={(e) => {
if (selectionMode) {
e.preventDefault();
toggleChatSelection(chat.id);
}
}}
>
{chat.title}
</Link>
</div>
<div className="flex flex-row items-center justify-between w-full"> <div className="flex flex-row items-center justify-between w-full">
<div className="flex flex-row items-center space-x-1 lg:space-x-1.5 text-black/70 dark:text-white/70"> <div className="flex flex-row items-center space-x-1 lg:space-x-1.5 text-black/70 dark:text-white/70">
<ClockIcon size={15} /> <ClockIcon size={15} />
@ -277,30 +97,16 @@ const Page = () => {
{formatTimeDifference(new Date(), chat.createdAt)} Ago {formatTimeDifference(new Date(), chat.createdAt)} Ago
</p> </p>
</div> </div>
<DeleteChat
{/* Delete button - only visible when not in selection mode */} chatId={chat.id}
{!selectionMode && ( chats={chats}
<DeleteChat setChats={setChats}
chatId={chat.id} />
chats={chats}
setChats={updateChatsAfterDelete}
/>
)}
</div> </div>
</div> </div>
))} ))}
</div> </div>
)} )}
{/* Batch Delete Confirmation Dialog */}
<BatchDeleteChats
chatIds={selectedChats}
chats={chats}
setChats={updateChatsAfterDelete}
onComplete={handleBatchDeleteComplete}
isOpen={isDeleteDialogOpen}
setIsOpen={setIsDeleteDialogOpen}
/>
</div> </div>
); );
}; };

View File

@ -7,7 +7,6 @@ import { Switch } from '@headlessui/react';
import ThemeSwitcher from '@/components/theme/Switcher'; import ThemeSwitcher from '@/components/theme/Switcher';
import { ImagesIcon, VideoIcon } from 'lucide-react'; import { ImagesIcon, VideoIcon } from 'lucide-react';
import Link from 'next/link'; import Link from 'next/link';
import { PROVIDER_METADATA } from '@/lib/providers';
interface SettingsType { interface SettingsType {
chatModelProviders: { chatModelProviders: {
@ -548,8 +547,9 @@ const Page = () => {
options={Object.keys(config.chatModelProviders).map( options={Object.keys(config.chatModelProviders).map(
(provider) => ({ (provider) => ({
value: provider, value: provider,
label: (PROVIDER_METADATA as any)[provider]?.displayName || label:
provider.charAt(0).toUpperCase() + provider.slice(1), provider.charAt(0).toUpperCase() +
provider.slice(1),
}), }),
)} )}
/> />
@ -689,8 +689,9 @@ const Page = () => {
options={Object.keys(config.embeddingModelProviders).map( options={Object.keys(config.embeddingModelProviders).map(
(provider) => ({ (provider) => ({
value: provider, value: provider,
label: (PROVIDER_METADATA as any)[provider]?.displayName || label:
provider.charAt(0).toUpperCase() + provider.slice(1), provider.charAt(0).toUpperCase() +
provider.slice(1),
}), }),
)} )}
/> />

View File

@ -1,118 +0,0 @@
import {
Description,
Dialog,
DialogBackdrop,
DialogPanel,
DialogTitle,
Transition,
TransitionChild,
} from '@headlessui/react';
import { Fragment, useState } from 'react';
import { toast } from 'sonner';
import { Chat } from '@/app/library/page';
interface BatchDeleteChatsProps {
chatIds: string[];
chats: Chat[];
setChats: (chats: Chat[]) => void;
onComplete: () => void;
isOpen: boolean;
setIsOpen: (isOpen: boolean) => void;
}
const BatchDeleteChats = ({
chatIds,
chats,
setChats,
onComplete,
isOpen,
setIsOpen,
}: BatchDeleteChatsProps) => {
const [loading, setLoading] = useState(false);
const handleDelete = async () => {
if (chatIds.length === 0) return;
setLoading(true);
try {
for (const chatId of chatIds) {
await fetch(`/api/chats/${chatId}`, {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
},
});
}
const newChats = chats.filter(chat => !chatIds.includes(chat.id));
setChats(newChats);
toast.success(`${chatIds.length} thread${chatIds.length > 1 ? 's' : ''} deleted`);
onComplete();
} catch (err: any) {
toast.error('Failed to delete threads');
} finally {
setIsOpen(false);
setLoading(false);
}
};
return (
<Transition appear show={isOpen} as={Fragment}>
<Dialog
as="div"
className="relative z-50"
onClose={() => {
if (!loading) {
setIsOpen(false);
}
}}
>
<DialogBackdrop className="fixed inset-0 bg-black/30" />
<div className="fixed inset-0 overflow-y-auto">
<div className="flex min-h-full items-center justify-center p-4 text-center">
<TransitionChild
as={Fragment}
enter="ease-out duration-200"
enterFrom="opacity-0 scale-95"
enterTo="opacity-100 scale-100"
leave="ease-in duration-100"
leaveFrom="opacity-100 scale-200"
leaveTo="opacity-0 scale-95"
>
<DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<DialogTitle className="text-lg font-medium leading-6 dark:text-white">
Delete Confirmation
</DialogTitle>
<Description className="text-sm dark:text-white/70 text-black/70">
Are you sure you want to delete {chatIds.length} selected thread{chatIds.length !== 1 ? 's' : ''}?
</Description>
<div className="flex flex-row items-end justify-end space-x-4 mt-6">
<button
onClick={() => {
if (!loading) {
setIsOpen(false);
}
}}
className="text-black/50 dark:text-white/50 text-sm hover:text-black/70 hover:dark:text-white/70 transition duration-200"
>
Cancel
</button>
<button
onClick={handleDelete}
className="text-red-400 text-sm hover:text-red-500 transition duration-200"
disabled={loading}
>
Delete
</button>
</div>
</DialogPanel>
</TransitionChild>
</div>
</div>
</Dialog>
</Transition>
);
};
export default BatchDeleteChats;

View File

@ -363,20 +363,18 @@ const ChatWindow = ({ id }: { id?: string }) => {
if (data.type === 'sources') { if (data.type === 'sources') {
sources = data.data; sources = data.data;
if (!added) { setMessages((prevMessages) => [
setMessages((prevMessages) => [ ...prevMessages,
...prevMessages, {
{ content: '',
content: '', messageId: data.messageId,
messageId: data.messageId, chatId: chatId!,
chatId: chatId!, role: 'assistant',
role: 'assistant', sources: sources,
sources: sources, createdAt: new Date(),
createdAt: new Date(), },
}, ]);
]); added = true;
added = true;
}
setMessageAppeared(true); setMessageAppeared(true);
} }
@ -394,20 +392,20 @@ const ChatWindow = ({ id }: { id?: string }) => {
}, },
]); ]);
added = true; added = true;
setMessageAppeared(true);
} else {
setMessages((prev) =>
prev.map((message) => {
if (message.messageId === data.messageId) {
return { ...message, content: message.content + data.data };
}
return message;
}),
);
} }
setMessages((prev) =>
prev.map((message) => {
if (message.messageId === data.messageId) {
return { ...message, content: message.content + data.data };
}
return message;
}),
);
recievedMessage += data.data; recievedMessage += data.data;
setMessageAppeared(true);
} }
if (data.type === 'messageEnd') { if (data.type === 'messageEnd') {

View File

@ -97,7 +97,6 @@ const MessageBox = ({
}, },
), ),
); );
setSpeechMessage(message.content.replace(regex, ''));
return; return;
} }

View File

@ -76,13 +76,11 @@ const Optimization = ({
<PopoverButton <PopoverButton
onClick={() => setOptimizationMode(mode.key)} onClick={() => setOptimizationMode(mode.key)}
key={i} key={i}
disabled={mode.key === 'quality'}
className={cn( className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition', 'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition',
optimizationMode === mode.key optimizationMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary' ? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary', : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
mode.key === 'quality' && 'opacity-50 cursor-not-allowed',
)} )}
> >
<div className="flex flex-row items-center space-x-1 text-black dark:text-white"> <div className="flex flex-row items-center space-x-1 text-black dark:text-white">

View File

@ -1,14 +1,7 @@
import fs from 'fs';
import path from 'path';
import toml from '@iarna/toml'; import toml from '@iarna/toml';
// Use dynamic imports for Node.js modules to prevent client-side errors
let fs: any;
let path: any;
if (typeof window === 'undefined') {
// We're on the server
fs = require('fs');
path = require('path');
}
const configFileName = 'config.toml'; const configFileName = 'config.toml';
interface Config { interface Config {
@ -35,9 +28,6 @@ interface Config {
DEEPSEEK: { DEEPSEEK: {
API_KEY: string; API_KEY: string;
}; };
LM_STUDIO: {
API_URL: string;
};
CUSTOM_OPENAI: { CUSTOM_OPENAI: {
API_URL: string; API_URL: string;
API_KEY: string; API_KEY: string;
@ -53,17 +43,10 @@ type RecursivePartial<T> = {
[P in keyof T]?: RecursivePartial<T[P]>; [P in keyof T]?: RecursivePartial<T[P]>;
}; };
const loadConfig = () => { const loadConfig = () =>
// Server-side only toml.parse(
if (typeof window === 'undefined') { fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
return toml.parse( ) as any as Config;
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
) as any as Config;
}
// Client-side fallback - settings will be loaded via API
return {} as Config;
};
export const getSimilarityMeasure = () => export const getSimilarityMeasure = () =>
loadConfig().GENERAL.SIMILARITY_MEASURE; loadConfig().GENERAL.SIMILARITY_MEASURE;
@ -94,8 +77,6 @@ export const getCustomOpenaiApiUrl = () =>
export const getCustomOpenaiModelName = () => export const getCustomOpenaiModelName = () =>
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME; loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
export const getLMStudioApiEndpoint = () => loadConfig().MODELS.LM_STUDIO.API_URL;
const mergeConfigs = (current: any, update: any): any => { const mergeConfigs = (current: any, update: any): any => {
if (update === null || update === undefined) { if (update === null || update === undefined) {
return current; return current;
@ -128,13 +109,10 @@ const mergeConfigs = (current: any, update: any): any => {
}; };
export const updateConfig = (config: RecursivePartial<Config>) => { export const updateConfig = (config: RecursivePartial<Config>) => {
// Server-side only const currentConfig = loadConfig();
if (typeof window === 'undefined') { const mergedConfig = mergeConfigs(currentConfig, config);
const currentConfig = loadConfig(); fs.writeFileSync(
const mergedConfig = mergeConfigs(currentConfig, config); path.join(path.join(process.cwd(), `${configFileName}`)),
fs.writeFileSync( toml.stringify(mergedConfig),
path.join(path.join(process.cwd(), `${configFileName}`)), );
toml.stringify(mergedConfig),
);
}
}; };

View File

@ -3,42 +3,9 @@ import Database from 'better-sqlite3';
import * as schema from './schema'; import * as schema from './schema';
import path from 'path'; import path from 'path';
// Create SQLite connection
const sqlite = new Database(path.join(process.cwd(), 'data/db.sqlite')); const sqlite = new Database(path.join(process.cwd(), 'data/db.sqlite'));
const db = drizzle(sqlite, { const db = drizzle(sqlite, {
schema: schema, schema: schema,
}); });
// Initialize database schema
(function initializeDatabase() {
console.log('[DB] Checking database schema...');
try {
// Check if userPreferences table exists
const tableExists = sqlite.prepare(`
SELECT name FROM sqlite_master
WHERE type='table' AND name=?;
`).all('userPreferences').length > 0;
if (!tableExists) {
console.log('[DB] Creating userPreferences table...');
sqlite.prepare(`
CREATE TABLE userPreferences (
id INTEGER PRIMARY KEY,
userId TEXT NOT NULL UNIQUE,
categories TEXT DEFAULT '[]' NOT NULL,
languages TEXT DEFAULT '[]' NOT NULL,
createdAt TEXT NOT NULL,
updatedAt TEXT NOT NULL
);
`).run();
console.log('[DB] userPreferences table created successfully.');
} else {
console.log('[DB] userPreferences table already exists.');
}
} catch (error) {
console.error('[DB] Error during database initialization:', error);
}
})();
export default db; export default db;

View File

@ -1,61 +0,0 @@
import db from './index';
import { userPreferences } from './schema';
import { sql } from 'drizzle-orm';
/**
* Run database migrations to ensure schema is up to date.
* This is designed to run once at application startup.
*/
export async function runMigrations() {
console.log('[DB Migration] Checking database schema...');
try {
// Check if userPreferences table exists
const tableExists = await checkIfTableExists('userPreferences');
if (!tableExists) {
console.log('[DB Migration] Creating userPreferences table...');
await createUserPreferencesTable();
console.log('[DB Migration] userPreferences table created successfully.');
} else {
console.log('[DB Migration] userPreferences table already exists.');
}
console.log('[DB Migration] Database schema is up to date.');
} catch (error) {
console.error('[DB Migration] Error during migration:', error);
// Don't throw the error - we want the application to continue even if migration fails
}
}
/**
* Check if a table exists in the database
*/
async function checkIfTableExists(tableName: string): Promise<boolean> {
const result = db.$client.prepare(`
SELECT name FROM sqlite_master
WHERE type='table' AND name=?;
`).all(tableName);
return result.length > 0;
}
/**
* Create the userPreferences table using the schema definition
*/
async function createUserPreferencesTable() {
// Create the table using a raw SQL query based on our schema
db.$client.prepare(`
CREATE TABLE userPreferences (
id INTEGER PRIMARY KEY,
userId TEXT NOT NULL UNIQUE,
categories TEXT DEFAULT '[]' NOT NULL,
languages TEXT DEFAULT '[]' NOT NULL,
createdAt TEXT NOT NULL,
updatedAt TEXT NOT NULL
);
`).run();
}
// Run migrations automatically when this module is imported
runMigrations();

View File

@ -26,17 +26,3 @@ export const chats = sqliteTable('chats', {
.$type<File[]>() .$type<File[]>()
.default(sql`'[]'`), .default(sql`'[]'`),
}); });
// Add user preferences table for Discover features
export const userPreferences = sqliteTable('userPreferences', {
id: integer('id').primaryKey(),
userId: text('userId').notNull().unique(),
categories: text('categories', { mode: 'json' })
.$type<string[]>()
.default(sql`'[]'`), // Categories will be set at the application level
languages: text('languages', { mode: 'json' })
.$type<string[]>()
.default(sql`'[]'`), // Languages will be set at the application level
createdAt: text('createdAt').notNull(),
updatedAt: text('updatedAt').notNull(),
});

View File

@ -1,11 +1,6 @@
import { ChatAnthropic } from '@langchain/anthropic'; import { ChatAnthropic } from '@langchain/anthropic';
import { ChatModel } from '.'; import { ChatModel } from '.';
import { getAnthropicApiKey } from '../config'; import { getAnthropicApiKey } from '../config';
export const PROVIDER_INFO = {
key: 'anthropic',
displayName: 'Anthropic'
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const anthropicChatModels: Record<string, string>[] = [ const anthropicChatModels: Record<string, string>[] = [

View File

@ -3,11 +3,6 @@ import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.'; import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
export const PROVIDER_INFO = {
key: 'deepseek',
displayName: 'Deepseek AI'
};
const deepseekChatModels: Record<string, string>[] = [ const deepseekChatModels: Record<string, string>[] = [
{ {
displayName: 'Deepseek Chat (Deepseek V3)', displayName: 'Deepseek Chat (Deepseek V3)',

View File

@ -4,11 +4,6 @@ import {
} from '@langchain/google-genai'; } from '@langchain/google-genai';
import { getGeminiApiKey } from '../config'; import { getGeminiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.'; import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'gemini',
displayName: 'Google Gemini'
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings'; import { Embeddings } from '@langchain/core/embeddings';

View File

@ -1,11 +1,6 @@
import { ChatOpenAI } from '@langchain/openai'; import { ChatOpenAI } from '@langchain/openai';
import { getGroqApiKey } from '../config'; import { getGroqApiKey } from '../config';
import { ChatModel } from '.'; import { ChatModel } from '.';
export const PROVIDER_INFO = {
key: 'groq',
displayName: 'Groq'
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const groqChatModels: Record<string, string>[] = [ const groqChatModels: Record<string, string>[] = [

View File

@ -1,34 +1,18 @@
import { Embeddings } from '@langchain/core/embeddings'; import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { loadOpenAIChatModels, loadOpenAIEmbeddingModels, PROVIDER_INFO as OpenAIInfo, PROVIDER_INFO } from './openai'; import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
import { import {
getCustomOpenaiApiKey, getCustomOpenaiApiKey,
getCustomOpenaiApiUrl, getCustomOpenaiApiUrl,
getCustomOpenaiModelName, getCustomOpenaiModelName,
} from '../config'; } from '../config';
import { ChatOpenAI } from '@langchain/openai'; import { ChatOpenAI } from '@langchain/openai';
import { loadOllamaChatModels, loadOllamaEmbeddingModels, PROVIDER_INFO as OllamaInfo } from './ollama'; import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq'; import { loadGroqChatModels } from './groq';
import { loadAnthropicChatModels, PROVIDER_INFO as AnthropicInfo } from './anthropic'; import { loadAnthropicChatModels } from './anthropic';
import { loadGeminiChatModels, loadGeminiEmbeddingModels, PROVIDER_INFO as GeminiInfo } from './gemini'; import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadTransformersEmbeddingsModels, PROVIDER_INFO as TransformersInfo } from './transformers'; import { loadTransformersEmbeddingsModels } from './transformers';
import { loadDeepseekChatModels, PROVIDER_INFO as DeepseekInfo } from './deepseek'; import { loadDeepseekChatModels } from './deepseek';
import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels, PROVIDER_INFO as LMStudioInfo } from './lmstudio';
export const PROVIDER_METADATA = {
openai: OpenAIInfo,
ollama: OllamaInfo,
groq: GroqInfo,
anthropic: AnthropicInfo,
gemini: GeminiInfo,
transformers: TransformersInfo,
deepseek: DeepseekInfo,
lmstudio: LMStudioInfo,
custom_openai: {
key: 'custom_openai',
displayName: 'Custom OpenAI'
}
};
export interface ChatModel { export interface ChatModel {
displayName: string; displayName: string;
@ -50,7 +34,6 @@ export const chatModelProviders: Record<
anthropic: loadAnthropicChatModels, anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels, gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels, deepseek: loadDeepseekChatModels,
lmstudio: loadLMStudioChatModels,
}; };
export const embeddingModelProviders: Record< export const embeddingModelProviders: Record<
@ -61,7 +44,6 @@ export const embeddingModelProviders: Record<
ollama: loadOllamaEmbeddingModels, ollama: loadOllamaEmbeddingModels,
gemini: loadGeminiEmbeddingModels, gemini: loadGeminiEmbeddingModels,
transformers: loadTransformersEmbeddingsModels, transformers: loadTransformersEmbeddingsModels,
lmstudio: loadLMStudioEmbeddingsModels,
}; };
export const getAvailableChatModelProviders = async () => { export const getAvailableChatModelProviders = async () => {

View File

@ -1,106 +0,0 @@
import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
import axios from 'axios';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'lmstudio',
displayName: 'LM Studio'
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
interface LMStudioModel {
id: string;
name?: string;
}
const ensureV1Endpoint = (endpoint: string): string =>
endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;
const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
try {
const keepAlive = getKeepAlive();
await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
timeout: parseInt(keepAlive) * 1000 || 5000,
headers: { 'Content-Type': 'application/json' },
});
return true;
} catch {
return false;
}
};
export const loadLMStudioChatModels = async () => {
const endpoint = getLMStudioApiEndpoint();
const keepAlive = getKeepAlive();
if (!endpoint) return {};
if (!await checkServerAvailability(endpoint)) return {};
try {
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
timeout: parseInt(keepAlive) * 1000 || 5000,
headers: { 'Content-Type': 'application/json' },
});
const chatModels: Record<string, ChatModel> = {};
response.data.data.forEach((model: LMStudioModel) => {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
openAIApiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},
modelName: model.id,
temperature: 0.7,
streaming: true,
maxRetries: 3
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading LM Studio models: ${err}`);
return {};
}
};
export const loadLMStudioEmbeddingsModels = async () => {
const endpoint = getLMStudioApiEndpoint();
const keepAlive = getKeepAlive();
if (!endpoint) return {};
if (!await checkServerAvailability(endpoint)) return {};
try {
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
timeout: parseInt(keepAlive) * 1000 || 5000,
headers: { 'Content-Type': 'application/json' },
});
const embeddingsModels: Record<string, EmbeddingModel> = {};
response.data.data.forEach((model: LMStudioModel) => {
embeddingsModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
openAIApiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},
modelName: model.id,
}) as unknown as Embeddings,
};
});
return embeddingsModels;
} catch (err) {
console.error(`Error loading LM Studio embeddings model: ${err}`);
return {};
}
};

View File

@ -1,11 +1,6 @@
import axios from 'axios'; import axios from 'axios';
import { getKeepAlive, getOllamaApiEndpoint } from '../config'; import { getKeepAlive, getOllamaApiEndpoint } from '../config';
import { ChatModel, EmbeddingModel } from '.'; import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'ollama',
displayName: 'Ollama'
};
import { ChatOllama } from '@langchain/community/chat_models/ollama'; import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama'; import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';

View File

@ -1,11 +1,6 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getOpenaiApiKey } from '../config'; import { getOpenaiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.'; import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'openai',
displayName: 'OpenAI'
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings'; import { Embeddings } from '@langchain/core/embeddings';

View File

@ -1,10 +1,5 @@
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer'; import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
export const PROVIDER_INFO = {
key: 'transformers',
displayName: 'Hugging Face'
};
export const loadTransformersEmbeddingsModels = async () => { export const loadTransformersEmbeddingsModels = async () => {
try { try {
const embeddingModels = { const embeddingModels = {

View File

@ -6,24 +6,20 @@ import {
MessagesPlaceholder, MessagesPlaceholder,
PromptTemplate, PromptTemplate,
} from '@langchain/core/prompts'; } from '@langchain/core/prompts';
import {
RunnableLambda,
RunnableMap,
RunnableSequence,
} from '@langchain/core/runnables';
import { BaseMessage } from '@langchain/core/messages'; import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers'; import { StringOutputParser } from '@langchain/core/output_parsers';
import LineListOutputParser from '../outputParsers/listLineOutputParser'; import LineListOutputParser from '../outputParsers/listLineOutputParser';
import LineOutputParser from '../outputParsers/lineOutputParser'; import LineOutputParser from '../outputParsers/lineOutputParser';
import { getDocumentsFromLinks } from '../utils/documents'; import { getDocumentsFromLinks } from '../utils/documents';
import { Document } from 'langchain/document'; import { Document } from 'langchain/document';
import { searchSearxng } from '../searxng'; import { searchSearxng, SearxngSearchResult } from '../searxng';
import path from 'node:path'; import path from 'node:path';
import fs from 'node:fs'; import fs from 'node:fs';
import computeSimilarity from '../utils/computeSimilarity'; import computeSimilarity from '../utils/computeSimilarity';
import formatChatHistoryAsString from '../utils/formatHistory'; import formatChatHistoryAsString from '../utils/formatHistory';
import eventEmitter from 'events'; import eventEmitter from 'events';
import { StreamEvent } from '@langchain/core/tracers/log_stream'; import { StreamEvent } from '@langchain/core/tracers/log_stream';
import { EventEmitter } from 'node:stream';
export interface MetaSearchAgentType { export interface MetaSearchAgentType {
searchAndAnswer: ( searchAndAnswer: (
@ -47,7 +43,7 @@ interface Config {
activeEngines: string[]; activeEngines: string[];
} }
type BasicChainInput = { type SearchInput = {
chat_history: BaseMessage[]; chat_history: BaseMessage[];
query: string; query: string;
}; };
@ -60,237 +56,385 @@ class MetaSearchAgent implements MetaSearchAgentType {
this.config = config; this.config = config;
} }
private async createSearchRetrieverChain(llm: BaseChatModel) { private async searchSources(
llm: BaseChatModel,
input: SearchInput,
emitter: EventEmitter,
) {
(llm as unknown as ChatOpenAI).temperature = 0; (llm as unknown as ChatOpenAI).temperature = 0;
return RunnableSequence.from([ const chatPrompt = PromptTemplate.fromTemplate(
PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt), this.config.queryGeneratorPrompt,
llm, );
this.strParser,
RunnableLambda.from(async (input: string) => {
const linksOutputParser = new LineListOutputParser({
key: 'links',
});
const questionOutputParser = new LineOutputParser({ const processedChatPrompt = await chatPrompt.invoke({
key: 'question', chat_history: formatChatHistoryAsString(input.chat_history),
}); query: input.query,
});
const links = await linksOutputParser.parse(input); const llmRes = await llm.invoke(processedChatPrompt);
let question = this.config.summarizer const messageStr = await this.strParser.invoke(llmRes);
? await questionOutputParser.parse(input)
: input;
if (question === 'not_needed') { const linksOutputParser = new LineListOutputParser({
return { query: '', docs: [] }; key: 'links',
});
const questionOutputParser = new LineOutputParser({
key: 'question',
});
const links = await linksOutputParser.parse(messageStr);
let question = this.config.summarizer
? await questionOutputParser.parse(messageStr)
: messageStr;
if (question === 'not_needed') {
return { query: '', docs: [] };
}
if (links.length > 0) {
if (question.length === 0) {
question = 'summarize';
}
let docs: Document[] = [];
const linkDocs = await getDocumentsFromLinks({ links });
const docGroups: Document[] = [];
linkDocs.map((doc) => {
const URLDocExists = docGroups.find(
(d) =>
d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
);
if (!URLDocExists) {
docGroups.push({
...doc,
metadata: {
...doc.metadata,
totalDocs: 1,
},
});
} }
if (links.length > 0) { const docIndex = docGroups.findIndex(
if (question.length === 0) { (d) =>
question = 'summarize'; d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
} );
let docs: Document[] = []; if (docIndex !== -1) {
docGroups[docIndex].pageContent =
const linkDocs = await getDocumentsFromLinks({ links }); docGroups[docIndex].pageContent + `\n\n` + doc.pageContent;
docGroups[docIndex].metadata.totalDocs += 1;
const docGroups: Document[] = [];
linkDocs.map((doc) => {
const URLDocExists = docGroups.find(
(d) =>
d.metadata.url === doc.metadata.url &&
d.metadata.totalDocs < 10,
);
if (!URLDocExists) {
docGroups.push({
...doc,
metadata: {
...doc.metadata,
totalDocs: 1,
},
});
}
const docIndex = docGroups.findIndex(
(d) =>
d.metadata.url === doc.metadata.url &&
d.metadata.totalDocs < 10,
);
if (docIndex !== -1) {
docGroups[docIndex].pageContent =
docGroups[docIndex].pageContent + `\n\n` + doc.pageContent;
docGroups[docIndex].metadata.totalDocs += 1;
}
});
await Promise.all(
docGroups.map(async (doc) => {
const res = await llm.invoke(`
You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the
text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.
- **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
- **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
- **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.
The text will be shared inside the \`text\` XML tag, and the query inside the \`query\` XML tag.
<example>
1. \`<text>
Docker is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers.
It was first released in 2013 and is developed by Docker, Inc. Docker is designed to make it easier to create, deploy, and run applications
by using containers.
</text>
<query>
What is Docker and how does it work?
</query>
Response:
Docker is a revolutionary platform-as-a-service product developed by Docker, Inc., that uses container technology to make application
deployment more efficient. It allows developers to package their software with all necessary dependencies, making it easier to run in
any environment. Released in 2013, Docker has transformed the way applications are built, deployed, and managed.
\`
2. \`<text>
The theory of relativity, or simply relativity, encompasses two interrelated theories of Albert Einstein: special relativity and general
relativity. However, the word "relativity" is sometimes used in reference to Galilean invariance. The term "theory of relativity" was based
on the expression "relative theory" used by Max Planck in 1906. The theory of relativity usually encompasses two interrelated theories by
Albert Einstein: special relativity and general relativity. Special relativity applies to all physical phenomena in the absence of gravity.
General relativity explains the law of gravitation and its relation to other forces of nature. It applies to the cosmological and astrophysical
realm, including astronomy.
</text>
<query>
summarize
</query>
Response:
The theory of relativity, developed by Albert Einstein, encompasses two main theories: special relativity and general relativity. Special
relativity applies to all physical phenomena in the absence of gravity, while general relativity explains the law of gravitation and its
relation to other forces of nature. The theory of relativity is based on the concept of "relative theory," as introduced by Max Planck in
1906. It is a fundamental theory in physics that has revolutionized our understanding of the universe.
\`
</example>
Everything below is the actual data you will be working with. Good luck!
<query>
${question}
</query>
<text>
${doc.pageContent}
</text>
Make sure to answer the query in the summary.
`);
const document = new Document({
pageContent: res.content as string,
metadata: {
title: doc.metadata.title,
url: doc.metadata.url,
},
});
docs.push(document);
}),
);
return { query: question, docs: docs };
} else {
question = question.replace(/<think>.*?<\/think>/g, '');
const res = await searchSearxng(question, {
language: 'en',
engines: this.config.activeEngines,
});
const documents = res.results.map(
(result) =>
new Document({
pageContent:
result.content ||
(this.config.activeEngines.includes('youtube')
? result.title
: '') /* Todo: Implement transcript grabbing using Youtubei (source: https://www.npmjs.com/package/youtubei) */,
metadata: {
title: result.title,
url: result.url,
...(result.img_src && { img_src: result.img_src }),
},
}),
);
return { query: question, docs: documents };
} }
}), });
]);
await Promise.all(
docGroups.map(async (doc) => {
const res = await llm.invoke(`
You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the
text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.
- **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
- **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
- **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.
The text will be shared inside the \`text\` XML tag, and the query inside the \`query\` XML tag.
<example>
1. \`<text>
Docker is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers.
It was first released in 2013 and is developed by Docker, Inc. Docker is designed to make it easier to create, deploy, and run applications
by using containers.
</text>
<query>
What is Docker and how does it work?
</query>
Response:
Docker is a revolutionary platform-as-a-service product developed by Docker, Inc., that uses container technology to make application
deployment more efficient. It allows developers to package their software with all necessary dependencies, making it easier to run in
any environment. Released in 2013, Docker has transformed the way applications are built, deployed, and managed.
\`
2. \`<text>
The theory of relativity, or simply relativity, encompasses two interrelated theories of Albert Einstein: special relativity and general
relativity. However, the word "relativity" is sometimes used in reference to Galilean invariance. The term "theory of relativity" was based
on the expression "relative theory" used by Max Planck in 1906. The theory of relativity usually encompasses two interrelated theories by
Albert Einstein: special relativity and general relativity. Special relativity applies to all physical phenomena in the absence of gravity.
General relativity explains the law of gravitation and its relation to other forces of nature. It applies to the cosmological and astrophysical
realm, including astronomy.
</text>
<query>
summarize
</query>
Response:
The theory of relativity, developed by Albert Einstein, encompasses two main theories: special relativity and general relativity. Special
relativity applies to all physical phenomena in the absence of gravity, while general relativity explains the law of gravitation and its
relation to other forces of nature. The theory of relativity is based on the concept of "relative theory," as introduced by Max Planck in
1906. It is a fundamental theory in physics that has revolutionized our understanding of the universe.
\`
</example>
Everything below is the actual data you will be working with. Good luck!
<query>
${question}
</query>
<text>
${doc.pageContent}
</text>
Make sure to answer the query in the summary.
`);
const document = new Document({
pageContent: res.content as string,
metadata: {
title: doc.metadata.title,
url: doc.metadata.url,
},
});
docs.push(document);
}),
);
return { query: question, docs: docs };
} else {
question = question.replace(/<think>.*?<\/think>/g, '');
const res = await searchSearxng(question, {
language: 'en',
engines: this.config.activeEngines,
});
const documents = res.results.map(
(result) =>
new Document({
pageContent:
result.content ||
(this.config.activeEngines.includes('youtube')
? result.title
: '') /* Todo: Implement transcript grabbing using Youtubei (source: https://www.npmjs.com/package/youtubei) */,
metadata: {
title: result.title,
url: result.url,
...(result.img_src && { img_src: result.img_src }),
},
}),
);
return { query: question, docs: documents };
}
} }
private async createAnsweringChain( private async performDeepResearch(
llm: BaseChatModel,
input: SearchInput,
emitter: EventEmitter,
) {
(llm as unknown as ChatOpenAI).temperature = 0;
const queryGenPrompt = PromptTemplate.fromTemplate(
this.config.queryGeneratorPrompt,
);
const formattedChatPrompt = await queryGenPrompt.invoke({
chat_history: formatChatHistoryAsString(input.chat_history),
query: input.query,
});
let i = 0;
let currentQuery = await this.strParser.invoke(
await llm.invoke(formattedChatPrompt),
);
const originalQuery = currentQuery;
const pastQueries: string[] = [];
const results: SearxngSearchResult[] = [];
while (i < 10) {
const res = await searchSearxng(currentQuery, {
language: 'en',
engines: this.config.activeEngines,
});
results.push(...res.results);
const reflectorPrompt = PromptTemplate.fromTemplate(`
You are an LLM that is tasked with reflecting on the results of a search query.
## Goal
You will be given question of the user, a list of search results collected from the web to answer that question along with past queries made to collect those results. You have to analyze the results based on user's question and do the following:
1. Identify unexplored areas or areas with less detailed information in the results and generate a new query that focuses on those areas. The new queries should be more specific and a similar query shall not exist in past queries which will be provided to you. Make sure to include keywords that you're looking for because the new query will be used to search the web for information on that topic. Make sure the query contains only 1 question and is not too long to ensure it is Search Engine friendly.
2. You'll have to generate a description explaining what you are doing for example "I am looking for more information about X" or "Understanding how X works" etc. The description should be short and concise.
## Output format
You need to output in XML format and do not generate any other text. ake sure to not include any other text in the output or start a conversation in the output. The output should be in the following format:
<query>(query)</query>
<description>(description)</description>
## Example
Say the user asked "What is Llama 4 by Meta?" and let search results contain information about Llama 4 being an LLM and very little information about its features. You can output:
<query>Llama 4 features</query> // Generate queries that capture keywords for SEO and not making words like "How", "What", "Why" etc.
<description>Looking for new features in Llama 4</description>
or something like
<query>How is Llama 4 better than its previous generation models</query>
<description>Understanding the difference between Llama 4 and previous generation models.</description>
## BELOW IS THE ACTUAL DATA YOU WILL BE WORKING WITH. IT IS NOT A PART OF EXAMPLES. YOU'LL HAVE TO GENERATE YOUR ANSWER BASED ON THIS DATA.
<user_question>\n{question}\n</user_question>
<search_results>\n{search_results}\n</search_results>
<past_queries>\n{past_queries}\n</past_queries>
Response:
`);
const formattedReflectorPrompt = await reflectorPrompt.invoke({
question: originalQuery,
search_results: results
.map(
(result) => `<result>${result.title} - ${result.content}</result>`,
)
.join('\n'),
past_queries: pastQueries.map((q) => `<query>${q}</query>`).join('\n'),
});
const feedback = await this.strParser.invoke(
await llm.invoke(formattedReflectorPrompt),
);
console.log(`Feedback: ${feedback}`);
const queryOutputParser = new LineOutputParser({
key: 'query',
});
const descriptionOutputParser = new LineOutputParser({
key: 'description',
});
currentQuery = await queryOutputParser.parse(feedback);
const description = await descriptionOutputParser.parse(feedback);
console.log(`Query: ${currentQuery}`);
console.log(`Description: ${description}`);
pastQueries.push(currentQuery);
++i;
}
const uniqueResults: SearxngSearchResult[] = [];
results.forEach((res) => {
const exists = uniqueResults.find((r) => r.url === res.url);
if (!exists) {
uniqueResults.push(res);
} else {
exists.content += `\n\n` + res.content;
}
});
const documents = uniqueResults /* .slice(0, 50) */
.map(
(r) =>
new Document({
pageContent: r.content || '',
metadata: {
title: r.title,
url: r.url,
...(r.img_src && { img_src: r.img_src }),
},
}),
);
return documents;
}
private async streamAnswer(
llm: BaseChatModel, llm: BaseChatModel,
fileIds: string[], fileIds: string[],
embeddings: Embeddings, embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality', optimizationMode: 'speed' | 'balanced' | 'quality',
systemInstructions: string, systemInstructions: string,
input: SearchInput,
emitter: EventEmitter,
) { ) {
return RunnableSequence.from([ const chatPrompt = ChatPromptTemplate.fromMessages([
RunnableMap.from({ ['system', this.config.responsePrompt],
systemInstructions: () => systemInstructions, new MessagesPlaceholder('chat_history'),
query: (input: BasicChainInput) => input.query, ['user', '{query}'],
chat_history: (input: BasicChainInput) => input.chat_history, ]);
date: () => new Date().toISOString(),
context: RunnableLambda.from(async (input: BasicChainInput) => {
const processedHistory = formatChatHistoryAsString(
input.chat_history,
);
let docs: Document[] | null = null; let context = '';
let query = input.query;
if (this.config.searchWeb) { if (optimizationMode === 'speed' || optimizationMode === 'balanced') {
const searchRetrieverChain = let docs: Document[] | null = null;
await this.createSearchRetrieverChain(llm); let query = input.query;
const searchRetrieverResult = await searchRetrieverChain.invoke({ if (this.config.searchWeb) {
chat_history: processedHistory, const searchResults = await this.searchSources(llm, input, emitter);
query,
});
query = searchRetrieverResult.query; query = searchResults.query;
docs = searchRetrieverResult.docs; docs = searchResults.docs;
} }
const sortedDocs = await this.rerankDocs( const sortedDocs = await this.rerankDocs(
query, query,
docs ?? [], docs ?? [],
fileIds, fileIds,
embeddings, embeddings,
optimizationMode, optimizationMode,
); );
return sortedDocs; emitter.emit(
}) 'data',
.withConfig({ JSON.stringify({ type: 'sources', data: sortedDocs }),
runName: 'FinalSourceRetriever', );
})
.pipe(this.processDocs), context = this.processDocs(sortedDocs);
}), } else if (optimizationMode === 'quality') {
ChatPromptTemplate.fromMessages([ let docs: Document[] = [];
['system', this.config.responsePrompt],
new MessagesPlaceholder('chat_history'), docs = await this.performDeepResearch(llm, input, emitter);
['user', '{query}'],
]), emitter.emit('data', JSON.stringify({ type: 'sources', data: docs }));
llm,
this.strParser, context = this.processDocs(docs);
]).withConfig({ }
runName: 'FinalResponseGenerator',
const formattedChatPrompt = await chatPrompt.invoke({
query: input.query,
chat_history: input.chat_history,
date: new Date().toISOString(),
context: context,
systemInstructions: systemInstructions,
}); });
const llmRes = await llm.stream(formattedChatPrompt);
for await (const data of llmRes) {
const messageStr = await this.strParser.invoke(data);
emitter.emit(
'data',
JSON.stringify({ type: 'response', data: messageStr }),
);
}
emitter.emit('end');
} }
private async rerankDocs( private async rerankDocs(
@ -426,44 +570,13 @@ class MetaSearchAgent implements MetaSearchAgentType {
return docs return docs
.map( .map(
(_, index) => (_, index) =>
`${index + 1}. ${docs[index].metadata.title} ${docs[index].pageContent}`, `${index + 1}. ${docs[index].metadata.title} ${
docs[index].pageContent
}`,
) )
.join('\n'); .join('\n');
} }
private async handleStream(
stream: AsyncGenerator<StreamEvent, any, any>,
emitter: eventEmitter,
) {
for await (const event of stream) {
if (
event.event === 'on_chain_end' &&
event.name === 'FinalSourceRetriever'
) {
``;
emitter.emit(
'data',
JSON.stringify({ type: 'sources', data: event.data.output }),
);
}
if (
event.event === 'on_chain_stream' &&
event.name === 'FinalResponseGenerator'
) {
emitter.emit(
'data',
JSON.stringify({ type: 'response', data: event.data.chunk }),
);
}
if (
event.event === 'on_chain_end' &&
event.name === 'FinalResponseGenerator'
) {
emitter.emit('end');
}
}
}
async searchAndAnswer( async searchAndAnswer(
message: string, message: string,
history: BaseMessage[], history: BaseMessage[],
@ -475,26 +588,19 @@ class MetaSearchAgent implements MetaSearchAgentType {
) { ) {
const emitter = new eventEmitter(); const emitter = new eventEmitter();
const answeringChain = await this.createAnsweringChain( this.streamAnswer(
llm, llm,
fileIds, fileIds,
embeddings, embeddings,
optimizationMode, optimizationMode,
systemInstructions, systemInstructions,
);
const stream = answeringChain.streamEvents(
{ {
chat_history: history, chat_history: history,
query: message, query: message,
}, },
{ emitter,
version: 'v1',
},
); );
this.handleStream(stream, emitter);
return emitter; return emitter;
} }
} }

View File

@ -8,7 +8,7 @@ interface SearxngSearchOptions {
pageno?: number; pageno?: number;
} }
interface SearxngSearchResult { export interface SearxngSearchResult {
title: string; title: string;
url: string; url: string;
img_src?: string; img_src?: string;