Mirror of https://github.com/ItzCrazyKns/Perplexica.git, synced 2025-04-30 16:22:29 +00:00

Comparing 18533d58c2...master (12 commits):
68e151b2bd, 06ff272541, 4154d5e4b1, 1862491496, 073b5e897c, 9a332e79e4, 72450b9217, 7e1dc33a08, aa240009ab, 41b258e4d8, 28b9cca413, 8aaee2c40c

README.md (17 changes)
@@ -1,21 +1,5 @@
 # 🚀 Perplexica - An AI-powered search engine 🔎 <!-- omit in toc -->
 
-<div align="center" markdown="1">
-
-<sup>Special thanks to:</sup>
-<br>
-<br>
-<a href="https://www.warp.dev/perplexica">
-<img alt="Warp sponsorship" width="400" src="https://github.com/user-attachments/assets/775dd593-9b5f-40f1-bf48-479faff4c27b">
-</a>
-
-### [Warp, the AI Devtool that lives in your terminal](https://www.warp.dev/perplexica)
-
-[Available for MacOS, Linux, & Windows](https://www.warp.dev/perplexica)
-
-</div>
-
-<hr/>
 [](https://discord.gg/26aArMy8tT)
 
 
@@ -159,6 +143,7 @@ Perplexica runs on Next.js and handles all API requests. It works right away on
 
 [](https://usw.sealos.io/?openapp=system-template%3FtemplateName%3Dperplexica)
 [](https://repocloud.io/details/?app_id=267)
+[](https://template.run.claw.cloud/?referralCode=U11MRQ8U9RM4&openapp=system-fastdeploy%3FtemplateName%3Dperplexica)
 
 ## Upcoming Features
sample.config.toml

@@ -25,9 +25,8 @@ API_URL = "" # Ollama API URL - http://host.docker.internal:11434
 [MODELS.DEEPSEEK]
 API_KEY = ""
 
+[MODELS.LM_STUDIO]
+API_URL = "" # LM Studio API URL - http://host.docker.internal:1234
+
 [API_ENDPOINTS]
 SEARXNG = "" # SearxNG API URL - http://localhost:32768
-TAVILY = "" # Tavily API key
-
-[SEARCH]
-ENGINE = "searxng" # "searxng" or "tavily"
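For orientation, the new [MODELS.LM_STUDIO] section is consumed server-side through the TOML loader in src/lib/config.ts (later in this diff). A minimal sketch of the lookup, assuming the same @iarna/toml parser; the fallback URL is taken from the comment above purely as an illustration:

    import fs from 'fs';
    import toml from '@iarna/toml';

    // Hypothetical standalone reader; the app itself goes through loadConfig()
    // in src/lib/config.ts. The fallback URL is the one from the comment above.
    const raw = toml.parse(fs.readFileSync('config.toml', 'utf-8')) as any;
    const lmStudioUrl: string =
      raw.MODELS?.LM_STUDIO?.API_URL || 'http://host.docker.internal:1234';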
src/app/api/config/route.ts

@@ -8,8 +8,7 @@ import {
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   getDeepseekApiKey,
-  getSearchEngine,
-  getTavilyApiKey,
+  getLMStudioApiEndpoint,
   updateConfig,
 } from '@/lib/config';
 import {

@@ -53,6 +52,7 @@ export const GET = async (req: Request) => {
 
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
     config['geminiApiKey'] = getGeminiApiKey();

@@ -60,8 +60,6 @@ export const GET = async (req: Request) => {
     config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
     config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
     config['customOpenaiModelName'] = getCustomOpenaiModelName();
-    config['searchEngine'] = getSearchEngine();
-    config['tavilyApiKey'] = getTavilyApiKey();
 
     return Response.json({ ...config }, { status: 200 });
   } catch (err) {

@@ -97,18 +95,15 @@ export const POST = async (req: Request) => {
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,
       },
+      LM_STUDIO: {
+        API_URL: config.lmStudioApiUrl,
+      },
       CUSTOM_OPENAI: {
         API_URL: config.customOpenaiApiUrl,
         API_KEY: config.customOpenaiApiKey,
         MODEL_NAME: config.customOpenaiModelName,
       },
     },
-    SEARCH: {
-      ENGINE: config.searchEngine,
-    },
-    API_ENDPOINTS: {
-      TAVILY: config.tavilyApiKey || '',
-    },
   };
 
   updateConfig(updatedConfig);
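On the client, the settings page persists these fields by POSTing the flat camelCase object that this route re-nests into the TOML structure above. A hedged sketch of such a request; the endpoint path is inferred from the route's file location, and the keys mirror those handled in POST:

    // Hypothetical client-side save call for the fields handled above.
    const saveSettings = async () => {
      await fetch('/api/config', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          lmStudioApiUrl: 'http://host.docker.internal:1234',
          customOpenaiApiUrl: '',
          customOpenaiApiKey: '',
          customOpenaiModelName: '',
        }),
      });
    };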
src/app/api/discover/route.ts

@@ -1,4 +1,4 @@
-import { searchSearxng } from '../../../lib/searchEngines/searxng';
+import { searchSearxng } from '@/lib/searxng';
 
 const articleWebsites = [
   'yahoo.com',
src/app/settings/page.tsx

@@ -7,6 +7,7 @@ import { Switch } from '@headlessui/react';
 import ThemeSwitcher from '@/components/theme/Switcher';
 import { ImagesIcon, VideoIcon } from 'lucide-react';
 import Link from 'next/link';
+import { PROVIDER_METADATA } from '@/lib/providers';
 
 interface SettingsType {
   chatModelProviders: {

@@ -20,12 +21,11 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  lmStudioApiUrl: string;
   deepseekApiKey: string;
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
-  searchEngine: string;
-  tavilyApiKey?: string;
 }
 
 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {

@@ -147,7 +147,6 @@ const Page = () => {
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
-  const [searchEngine, setSearchEngine] = useState<string>('searxng');
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
 
   useEffect(() => {

@@ -210,7 +209,6 @@ const Page = () => {
       );
 
       setSystemInstructions(localStorage.getItem('systemInstructions')!);
-      setSearchEngine(localStorage.getItem('searchEngine') || 'searxng');
 
       setIsLoading(false);
     };

@@ -370,10 +368,6 @@ const Page = () => {
         localStorage.setItem('embeddingModel', value);
       } else if (key === 'systemInstructions') {
         localStorage.setItem('systemInstructions', value);
-      } else if (key === 'searchEngine') {
-        localStorage.setItem('searchEngine', value);
-      } else if (key === 'tavilyApiKey') {
-        localStorage.setItem('tavilyApiKey', value);
       }
     } catch (err) {
       console.error('Failed to save:', err);

@@ -516,32 +510,6 @@ const Page = () => {
                   />
                 </Switch>
               </div>
-
-              <div className="flex flex-col space-y-1 mt-2">
-                <p className="text-black/70 dark:text-white/70 text-sm">
-                  Search Engine
-                </p>
-                <Select
-                  value={searchEngine}
-                  onChange={(e) => {
-                    const value = e.target.value;
-                    setSearchEngine(value);
-                    saveConfig('searchEngine', value);
-                  }}
-                  options={[
-                    { value: 'searxng', label: 'SearxNG' },
-                    ...(config.tavilyApiKey ? [{ value: 'tavily', label: 'Tavily' }] : []),
-                  ]}
-                />
-                <p className="text-xs text-black/60 dark:text-white/60 mt-1">
-                  Select which search engine to use for web searches
-                </p>
-                {searchEngine === 'tavily' && !config.tavilyApiKey && (
-                  <p className="text-xs text-red-500 mt-1">
-                    Tavily API key is required to use this search engine
-                  </p>
-                )}
-              </div>
             </div>
           </SettingsSection>
 

@@ -582,8 +550,9 @@ const Page = () => {
                   (provider) => ({
                     value: provider,
                     label:
+                      (PROVIDER_METADATA as any)[provider]?.displayName ||
                       provider.charAt(0).toUpperCase() +
                       provider.slice(1),
                   }),
                 )}
               />

@@ -724,8 +693,9 @@ const Page = () => {
                   (provider) => ({
                     value: provider,
                     label:
+                      (PROVIDER_METADATA as any)[provider]?.displayName ||
                       provider.charAt(0).toUpperCase() +
                       provider.slice(1),
                   }),
                 )}
               />

@@ -893,29 +863,22 @@ const Page = () => {
                 />
               </div>
 
-              <div className="flex flex-col space-y-1 mt-4 pt-4 border-t border-light-200 dark:border-dark-200">
-                <p className="text-black/90 dark:text-white/90 font-medium">Search Engine API Keys</p>
-                <p className="text-sm text-black/60 dark:text-white/60 mt-0.5">
-                  API keys for search engines used in the application
-                </p>
-              </div>
-
               <div className="flex flex-col space-y-1">
                 <p className="text-black/70 dark:text-white/70 text-sm">
-                  Tavily API Key
+                  LM Studio API URL
                 </p>
                 <Input
                   type="text"
-                  placeholder="Tavily API key"
-                  value={config.tavilyApiKey || ''}
-                  isSaving={savingStates['tavilyApiKey']}
+                  placeholder="LM Studio API URL"
+                  value={config.lmStudioApiUrl}
+                  isSaving={savingStates['lmStudioApiUrl']}
                   onChange={(e) => {
                     setConfig((prev) => ({
                       ...prev!,
-                      tavilyApiKey: e.target.value,
+                      lmStudioApiUrl: e.target.value,
                     }));
                   }}
-                  onSave={(value) => saveConfig('tavilyApiKey', value)}
+                  onSave={(value) => saveConfig('lmStudioApiUrl', value)}
                 />
               </div>
             </div>
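The new label fallback, added twice above, resolves a human-readable provider name from PROVIDER_METADATA and falls back to capitalizing the raw key. A standalone sketch of the same logic (PROVIDER_METADATA is the registry exported from src/lib/providers, shown later in this diff):

    import { PROVIDER_METADATA } from '@/lib/providers';

    // Same fallback as in the settings page: prefer the registered display
    // name, otherwise capitalize the raw provider key (e.g. 'groq' -> 'Groq').
    const providerLabel = (provider: string): string =>
      (PROVIDER_METADATA as any)[provider]?.displayName ||
      provider.charAt(0).toUpperCase() + provider.slice(1);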
src/components/MessageBox.tsx

@@ -97,6 +97,7 @@ const MessageBox = ({
         },
       ),
     );
+    setSpeechMessage(message.content.replace(regex, ''));
     return;
   }
 
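The added line keeps the spoken text in sync with the rendered text. The surrounding `regex` is defined outside this hunk; assuming it matches inline citation markers such as [1], which is how Perplexica renders sources, the effect would be:

    // Illustrative only: the actual `regex` lives elsewhere in MessageBox.
    const regex = /\[\d+\]/g;
    const content = 'Paris is the capital of France[1][2].';
    const speech = content.replace(regex, '');
    // speech === 'Paris is the capital of France.'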
src/lib/chains/imageSearchAgent.ts

@@ -7,7 +7,7 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../searchEngines/searxng';
+import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const imageSearchChainPrompt = `
src/lib/chains/videoSearchAgent.ts

@@ -7,7 +7,7 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../searchEngines/searxng';
+import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const VideoSearchChainPrompt = `
src/lib/config.ts

@@ -1,7 +1,14 @@
-import fs from 'fs';
-import path from 'path';
 import toml from '@iarna/toml';
 
+// Use dynamic imports for Node.js modules to prevent client-side errors
+let fs: any;
+let path: any;
+if (typeof window === 'undefined') {
+  // We're on the server
+  fs = require('fs');
+  path = require('path');
+}
+
 const configFileName = 'config.toml';
 
 interface Config {

@@ -28,6 +35,9 @@ interface Config {
   DEEPSEEK: {
     API_KEY: string;
   };
+  LM_STUDIO: {
+    API_URL: string;
+  };
   CUSTOM_OPENAI: {
     API_URL: string;
     API_KEY: string;

@@ -36,10 +46,6 @@ interface Config {
   };
   API_ENDPOINTS: {
     SEARXNG: string;
-    TAVILY: string;
-  };
-  SEARCH: {
-    ENGINE: string;
   };
 }
 

@@ -47,10 +53,17 @@ type RecursivePartial<T> = {
   [P in keyof T]?: RecursivePartial<T[P]>;
 };
 
-const loadConfig = () =>
-  toml.parse(
-    fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
-  ) as any as Config;
+const loadConfig = () => {
+  // Server-side only
+  if (typeof window === 'undefined') {
+    return toml.parse(
+      fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
+    ) as any as Config;
+  }
+
+  // Client-side fallback - settings will be loaded via API
+  return {} as Config;
+};
 
 export const getSimilarityMeasure = () =>
   loadConfig().GENERAL.SIMILARITY_MEASURE;

@@ -68,12 +81,6 @@ export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
 export const getSearxngApiEndpoint = () =>
   process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
 
-export const getTavilyApiKey = () =>
-  process.env.TAVILY_API_KEY || loadConfig().API_ENDPOINTS.TAVILY;
-
-export const getSearchEngine = () =>
-  process.env.SEARCH_ENGINE || loadConfig().SEARCH?.ENGINE || 'searxng';
-
 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
 
 export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;

@@ -87,6 +94,9 @@ export const getCustomOpenaiApiUrl = () =>
 export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
 
+export const getLMStudioApiEndpoint = () =>
+  loadConfig().MODELS.LM_STUDIO.API_URL;
+
 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
     return current;

@@ -119,10 +129,13 @@ const mergeConfigs = (current: any, update: any): any => {
 };
 
 export const updateConfig = (config: RecursivePartial<Config>) => {
-  const currentConfig = loadConfig();
-  const mergedConfig = mergeConfigs(currentConfig, config);
-  fs.writeFileSync(
-    path.join(path.join(process.cwd(), `${configFileName}`)),
-    toml.stringify(mergedConfig),
-  );
+  // Server-side only
+  if (typeof window === 'undefined') {
+    const currentConfig = loadConfig();
+    const mergedConfig = mergeConfigs(currentConfig, config);
+    fs.writeFileSync(
+      path.join(path.join(process.cwd(), `${configFileName}`)),
+      toml.stringify(mergedConfig),
+    );
+  }
 };
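The recurring `typeof window === 'undefined'` guard above is the standard way to keep Node-only modules out of client bundles in Next.js, where the same file can be evaluated in both environments. A minimal standalone sketch of the pattern, with illustrative names:

    // Illustrative pattern, not app code: touch Node built-ins only on the server.
    let fs: any;

    if (typeof window === 'undefined') {
      // Node.js: `window` is undefined, so require() is safe here.
      fs = require('fs');
    }

    export const readServerFile = (p: string): string | null =>
      fs ? fs.readFileSync(p, 'utf-8') : null; // browser callers get null and fall back to the API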
src/lib/providers/anthropic.ts

@@ -1,6 +1,11 @@
 import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatModel } from '.';
 import { getAnthropicApiKey } from '../config';
+
+export const PROVIDER_INFO = {
+  key: 'anthropic',
+  displayName: 'Anthropic',
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const anthropicChatModels: Record<string, string>[] = [
src/lib/providers/deepseek.ts

@@ -3,6 +3,11 @@ import { getDeepseekApiKey } from '../config';
 import { ChatModel } from '.';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
+export const PROVIDER_INFO = {
+  key: 'deepseek',
+  displayName: 'Deepseek AI',
+};
 
 const deepseekChatModels: Record<string, string>[] = [
   {
     displayName: 'Deepseek Chat (Deepseek V3)',
src/lib/providers/gemini.ts

@@ -4,6 +4,11 @@ import {
 } from '@langchain/google-genai';
 import { getGeminiApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'gemini',
+  displayName: 'Google Gemini',
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
 
src/lib/providers/groq.ts

@@ -1,6 +1,11 @@
 import { ChatOpenAI } from '@langchain/openai';
 import { getGroqApiKey } from '../config';
 import { ChatModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'groq',
+  displayName: 'Groq',
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const groqChatModels: Record<string, string>[] = [
src/lib/providers/index.ts

@@ -1,18 +1,60 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
+import {
+  loadOpenAIChatModels,
+  loadOpenAIEmbeddingModels,
+  PROVIDER_INFO as OpenAIInfo,
+  PROVIDER_INFO,
+} from './openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
 import { ChatOpenAI } from '@langchain/openai';
-import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
-import { loadGroqChatModels } from './groq';
-import { loadAnthropicChatModels } from './anthropic';
-import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
-import { loadTransformersEmbeddingsModels } from './transformers';
-import { loadDeepseekChatModels } from './deepseek';
+import {
+  loadOllamaChatModels,
+  loadOllamaEmbeddingModels,
+  PROVIDER_INFO as OllamaInfo,
+} from './ollama';
+import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
+import {
+  loadAnthropicChatModels,
+  PROVIDER_INFO as AnthropicInfo,
+} from './anthropic';
+import {
+  loadGeminiChatModels,
+  loadGeminiEmbeddingModels,
+  PROVIDER_INFO as GeminiInfo,
+} from './gemini';
+import {
+  loadTransformersEmbeddingsModels,
+  PROVIDER_INFO as TransformersInfo,
+} from './transformers';
+import {
+  loadDeepseekChatModels,
+  PROVIDER_INFO as DeepseekInfo,
+} from './deepseek';
+import {
+  loadLMStudioChatModels,
+  loadLMStudioEmbeddingsModels,
+  PROVIDER_INFO as LMStudioInfo,
+} from './lmstudio';
+
+export const PROVIDER_METADATA = {
+  openai: OpenAIInfo,
+  ollama: OllamaInfo,
+  groq: GroqInfo,
+  anthropic: AnthropicInfo,
+  gemini: GeminiInfo,
+  transformers: TransformersInfo,
+  deepseek: DeepseekInfo,
+  lmstudio: LMStudioInfo,
+  custom_openai: {
+    key: 'custom_openai',
+    displayName: 'Custom OpenAI',
+  },
+};
 
 export interface ChatModel {
   displayName: string;

@@ -34,6 +76,7 @@ export const chatModelProviders: Record<
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
   deepseek: loadDeepseekChatModels,
+  lmstudio: loadLMStudioChatModels,
 };
 
 export const embeddingModelProviders: Record<

@@ -44,6 +87,7 @@ export const embeddingModelProviders: Record<
   ollama: loadOllamaEmbeddingModels,
   gemini: loadGeminiEmbeddingModels,
   transformers: loadTransformersEmbeddingsModels,
+  lmstudio: loadLMStudioEmbeddingsModels,
 };
 
 export const getAvailableChatModelProviders = async () => {
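Taken together, each provider module now exports a PROVIDER_INFO record plus async loader functions, and this index wires them into key-to-loader maps. A hedged sketch of how a consumer like getAvailableChatModelProviders might iterate the registry; the loop body is an assumption, only the exported names come from the diff:

    import { chatModelProviders } from '@/lib/providers';

    // Hedged sketch: load every registered provider's chat models, keeping
    // only providers that actually return models (unconfigured keys or an
    // unreachable local server yield an empty record).
    const loadAllChatModels = async () => {
      const all: Record<string, unknown> = {};
      for (const [key, loader] of Object.entries(chatModelProviders)) {
        const models = await loader();
        if (Object.keys(models).length > 0) {
          all[key] = models;
        }
      }
      return all;
    };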
src/lib/providers/lmstudio.ts (new file, 100 lines)

@@ -0,0 +1,100 @@
+import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
+import axios from 'axios';
+import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'lmstudio',
+  displayName: 'LM Studio',
+};
+import { ChatOpenAI } from '@langchain/openai';
+import { OpenAIEmbeddings } from '@langchain/openai';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { Embeddings } from '@langchain/core/embeddings';
+
+interface LMStudioModel {
+  id: string;
+  name?: string;
+}
+
+const ensureV1Endpoint = (endpoint: string): string =>
+  endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;
+
+const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
+  try {
+    await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      headers: { 'Content-Type': 'application/json' },
+    });
+    return true;
+  } catch {
+    return false;
+  }
+};
+
+export const loadLMStudioChatModels = async () => {
+  const endpoint = getLMStudioApiEndpoint();
+
+  if (!endpoint) return {};
+  if (!(await checkServerAvailability(endpoint))) return {};
+
+  try {
+    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      headers: { 'Content-Type': 'application/json' },
+    });
+
+    const chatModels: Record<string, ChatModel> = {};
+
+    response.data.data.forEach((model: LMStudioModel) => {
+      chatModels[model.id] = {
+        displayName: model.name || model.id,
+        model: new ChatOpenAI({
+          openAIApiKey: 'lm-studio',
+          configuration: {
+            baseURL: ensureV1Endpoint(endpoint),
+          },
+          modelName: model.id,
+          temperature: 0.7,
+          streaming: true,
+          maxRetries: 3,
+        }) as unknown as BaseChatModel,
+      };
+    });
+
+    return chatModels;
+  } catch (err) {
+    console.error(`Error loading LM Studio models: ${err}`);
+    return {};
+  }
+};
+
+export const loadLMStudioEmbeddingsModels = async () => {
+  const endpoint = getLMStudioApiEndpoint();
+
+  if (!endpoint) return {};
+  if (!(await checkServerAvailability(endpoint))) return {};
+
+  try {
+    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
+      headers: { 'Content-Type': 'application/json' },
+    });
+
+    const embeddingsModels: Record<string, EmbeddingModel> = {};
+
+    response.data.data.forEach((model: LMStudioModel) => {
+      embeddingsModels[model.id] = {
+        displayName: model.name || model.id,
+        model: new OpenAIEmbeddings({
+          openAIApiKey: 'lm-studio',
+          configuration: {
+            baseURL: ensureV1Endpoint(endpoint),
+          },
+          modelName: model.id,
+        }) as unknown as Embeddings,
+      };
+    });
+
+    return embeddingsModels;
+  } catch (err) {
+    console.error(`Error loading LM Studio embeddings model: ${err}`);
+    return {};
+  }
+};
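A hedged usage sketch for the new provider: with LM Studio serving its OpenAI-compatible API and [MODELS.LM_STUDIO] API_URL set, the loader above can be exercised directly. Model IDs depend on what is loaded in LM Studio, and the invoke call assumes the LangChain BaseChatModel interface:

    import { loadLMStudioChatModels } from '@/lib/providers/lmstudio';
    import { HumanMessage } from '@langchain/core/messages';

    const main = async () => {
      // Assumes LM Studio is reachable, e.g. on http://host.docker.internal:1234
      // as suggested in sample.config.toml.
      const models = await loadLMStudioChatModels();
      const first = Object.values(models)[0];
      if (first) {
        const reply = await first.model.invoke([new HumanMessage('Hello!')]);
        console.log(first.displayName, String(reply.content));
      }
    };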
src/lib/providers/ollama.ts

@@ -1,6 +1,11 @@
 import axios from 'axios';
 import { getKeepAlive, getOllamaApiEndpoint } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'ollama',
+  displayName: 'Ollama',
+};
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 
src/lib/providers/openai.ts

@@ -1,6 +1,11 @@
 import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import { getOpenaiApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'openai',
+  displayName: 'OpenAI',
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
 

@@ -25,6 +30,18 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
   },
+  {
+    displayName: 'GPT 4.1 nano',
+    key: 'gpt-4.1-nano',
+  },
+  {
+    displayName: 'GPT 4.1 mini',
+    key: 'gpt-4.1-mini',
+  },
+  {
+    displayName: 'GPT 4.1',
+    key: 'gpt-4.1',
+  },
 ];
 
 const openaiEmbeddingModels: Record<string, string>[] = [
src/lib/providers/transformers.ts

@@ -1,5 +1,10 @@
 import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
+
+export const PROVIDER_INFO = {
+  key: 'transformers',
+  displayName: 'Hugging Face',
+};
 
 export const loadTransformersEmbeddingsModels = async () => {
   try {
     const embeddingModels = {
src/lib/search/metaSearchAgent.ts

@@ -17,9 +17,7 @@ import LineListOutputParser from '../outputParsers/listLineOutputParser';
 import LineOutputParser from '../outputParsers/lineOutputParser';
 import { getDocumentsFromLinks } from '../utils/documents';
 import { Document } from 'langchain/document';
-import { searchTavily } from '../searchEngines/tavily';
-import { searchSearxng } from '../searchEngines/searxng';
-import { getSearchEngine } from '../config';
+import { searchSearxng } from '../searxng';
 import path from 'node:path';
 import fs from 'node:fs';
 import computeSimilarity from '../utils/computeSimilarity';

@@ -207,42 +205,25 @@ class MetaSearchAgent implements MetaSearchAgentType {
       } else {
         question = question.replace(/<think>.*?<\/think>/g, '');
 
-        const searchEngine = getSearchEngine();
-
-        let res;
-
-        if (searchEngine === 'tavily') {
-          res = await searchTavily(question, {
-            search_depth: 'basic',
-            max_results: 15,
-            include_images: true,
-          });
-        } else {
-          // Default to SearxNG
-          res = await searchSearxng(question, {
-            language: 'en',
-            engines: this.config.activeEngines,
-          });
-        }
-
-        let documents: Document[] = [];
-
-        documents = documents.concat(
-          res.results.map(
-            (result) =>
-              new Document({
-                pageContent:
-                  result.content ||
-                  (this.config.activeEngines.includes('youtube')
-                    ? result.title
-                    : ''),
-                metadata: {
-                  title: result.title,
-                  url: result.url,
-                  ...(result.img_src ? { img_src: result.img_src } : {}),
-                },
-              }),
-          )
-        );
+        const res = await searchSearxng(question, {
+          language: 'en',
+          engines: this.config.activeEngines,
+        });
+
+        const documents = res.results.map(
+          (result) =>
+            new Document({
+              pageContent:
+                result.content ||
+                (this.config.activeEngines.includes('youtube')
+                  ? result.title
+                  : '') /* Todo: Implement transcript grabbing using Youtubei (source: https://www.npmjs.com/package/youtubei) */,
+              metadata: {
+                title: result.title,
+                url: result.url,
+                ...(result.img_src && { img_src: result.img_src }),
+              },
+            }),
+        );
 
         return { query: question, docs: documents };
src/lib/searchEngines/tavily.ts (deleted)

@@ -1,79 +0,0 @@
-import axios from 'axios';
-import { getTavilyApiKey } from '../config';
-
-interface TavilySearchOptions {
-  topic?: 'general' | 'news';
-  search_depth?: 'basic' | 'advanced';
-  chunks_per_source?: number;
-  max_results?: number;
-  time_range?: 'day' | 'week' | 'month' | 'year' | 'd' | 'w' | 'm' | 'y';
-  days?: number;
-  include_answer?: boolean | 'basic' | 'advanced';
-  include_raw_content?: boolean;
-  include_images?: boolean;
-  include_image_descriptions?: boolean;
-  include_domains?: string[];
-  exclude_domains?: string[];
-}
-
-interface TavilySearchResult {
-  title: string;
-  url: string;
-  content: string;
-  score: number;
-  raw_content?: string;
-}
-
-interface TavilySearchResponse {
-  query: string;
-  answer?: string;
-  images?: Array<{
-    url: string;
-    description?: string;
-  }>;
-  results: TavilySearchResult[];
-  response_time: string;
-}
-
-export const searchTavily = async (
-  query: string,
-  opts?: TavilySearchOptions,
-) => {
-  const tavilyApiKey = getTavilyApiKey();
-
-  if (!tavilyApiKey) {
-    throw new Error('Tavily API key is not configured');
-  }
-
-  const url = 'https://api.tavily.com/search';
-
-  const response = await axios.post<TavilySearchResponse>(
-    url,
-    {
-      query,
-      ...opts,
-    },
-    {
-      headers: {
-        'Content-Type': 'application/json',
-        'Authorization': `Bearer ${tavilyApiKey}`,
-      },
-    },
-  );
-
-  const results = response.data.results;
-
-  // Convert Tavily results to match the format expected by the rest of the application
-  const formattedResults = results.map(result => ({
-    title: result.title,
-    url: result.url,
-    content: result.content,
-    img_src: undefined, // Tavily doesn't provide image URLs in the standard response
-  }));
-
-  return {
-    results: formattedResults,
-    suggestions: [], // Tavily doesn't provide suggestions, so return empty array
-    answer: response.data.answer, // Include the AI-generated answer if available
-  };
-};
src/lib/searxng.ts (moved from src/lib/searchEngines/searxng.ts)

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getSearxngApiEndpoint } from '../config';
+import { getSearxngApiEndpoint } from './config';
 
 interface SearxngSearchOptions {
   categories?: string[];
src/lib/utils/documents.ts

@@ -64,7 +64,7 @@ export const getDocumentsFromLinks = async ({ links }: { links: string[] }) => {
       const splittedText = await splitter.splitText(parsedText);
       const title = res.data
         .toString('utf8')
-        .match(/<title>(.*?)<\/title>/)?.[1];
+        .match(/<title.*>(.*?)<\/title>/)?.[1];
 
       const linkDocs = splittedText.map((text) => {
         return new Document({
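The widened pattern tolerates attributes on the <title> tag, which the old literal match missed. A quick sketch of the difference; the HTML string is illustrative:

    const html = '<head><title data-rh="true">Example Page</title></head>';

    // Old pattern: fails when the tag carries attributes.
    html.match(/<title>(.*?)<\/title>/)?.[1];     // -> undefined

    // New pattern: `.*` consumes the attributes before the closing '>'.
    html.match(/<title.*>(.*?)<\/title>/)?.[1];   // -> 'Example Page'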