Compare commits

...

33 Commits

Author SHA1 Message Date
ItzCrazyKns
94e6db10bb feat(weather): add other measurement units, closes #821 #790 2025-07-18 21:09:32 +05:30
ItzCrazyKns
26e1d5fec3 feat(routes): lint & beautify 2025-07-17 22:23:11 +05:30
ItzCrazyKns
66be87b688 Merge branch 'pr/827' 2025-07-17 22:22:50 +05:30
amoshydra
f7b4e32218 fix(discover): provide language when fetching
some engines provide empty response when no language is provided.

fix #618
2025-07-17 02:14:49 +08:00
ItzCrazyKns
57407112fb feat(package): bump version 2025-07-16 10:39:50 +05:30
ItzCrazyKns
b280cc2e01 Merge pull request #787 from chriswritescode-dev/IOS
Fix: IOS Input Zoom / Support PWA Home Screen App, closes #458
2025-07-15 22:10:01 +05:30
ItzCrazyKns
e6ebf892c5 feat(styles): update globals.css 2025-07-15 21:47:20 +05:30
ItzCrazyKns
b754641058 feat(gitignore): add certificates 2025-07-15 21:45:44 +05:30
ItzCrazyKns
722f4f760e feat(manifest): update icons & screenshots 2025-07-15 21:45:37 +05:30
ItzCrazyKns
01e04a209f feat(public): add screenshots & update icons 2025-07-15 21:45:24 +05:30
ItzCrazyKns
0299fd1ea0 Merge pull request #817 from kittrydge/patch-1
Update Linux ollama instructions in README.md
2025-07-15 20:23:02 +05:30
ItzCrazyKns
cf8dec53ca feat(chat-window): select provider if model's present, closes #803 2025-07-07 16:09:36 +05:30
ItzCrazyKns
d5c012d748 Revert "Update ChatWindow.tsx"
This reverts commit 2ccbd9a44c.
2025-07-07 15:52:39 +05:30
ItzCrazyKns
2ccbd9a44c Update ChatWindow.tsx 2025-07-05 22:00:06 +05:30
kittrydge
ccd89d48d9 Update Linux ollama instructions in README.md
When setting the OLLAMA_HOST environment variable, the port number must be specified ( see https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux )

Also, 'systemctl daemon-reload' needs to be called after changing a systemd unit file, and before the relevant systemd service is reloaded.
2025-07-01 18:00:26 -06:00
ItzCrazyKns
87d788ddef Update README.md 2025-06-30 19:55:23 +05:30
ItzCrazyKns
809b625a34 feat(widgets): fix size on smaller screens, closes #791 2025-06-30 15:42:41 +05:30
ItzCrazyKns
95c753a549 Merge branch 'pr/815' 2025-06-30 15:38:31 +05:30
ItzCrazyKns
0bb8b7ec5c feat(weather-widget): enable geolocation for weather data
Replaces the previous commented-out geolocation logic with an implementation that uses the browser's geolocation API and reverse geocoding to determine the user's city. Falls back to approximate location if permission is denied or unavailable.
2025-06-28 13:49:17 +05:30
D1m7asis
c6d084f5dc feat: add AIML API provider
Introduces support for the AI/ML API provider, including configuration options, chat and embedding model loading, and UI integration. Updates documentation and sample config to reflect the new provider.
2025-06-27 13:43:54 +02:00
ItzCrazyKns
0024ce36c8 Merge pull request #784 from Davixk/fix/docs-typo
docs: correct typo in npm start command
2025-06-21 20:27:34 +05:30
ItzCrazyKns
c44e746807 Merge pull request #785 from koyasi777/patch-1
feat(gemini): add Gemini 2.5 Flash & Pro preview models (May 2025)
2025-06-21 20:26:37 +05:30
ItzCrazyKns
b1826066f4 Merge pull request #801 from glitchySid/patch-1
Update README.md
2025-06-21 20:25:41 +05:30
ItzCrazyKns
b0b8acc45b Merge pull request #781 from alckasoc/master
feat(models): Update Gemini 2.5 pro key
2025-06-21 20:25:06 +05:30
Siddhesh Mhatre
e2b9ffc072 Update README.md
Mentioned that Gemini api key can be used in perplexica.
2025-06-11 22:52:13 +05:30
Chris Scott
68c43ea372 Fix: IOS Input Zoom
config for theme consistency and iOS standalone mode
- Modified manifest.ts to ensure proper metadata

- Added display: standalone for iOS PWA behavior
2025-06-02 21:52:41 -04:00
Dave
3b46baca4f docs(readme): fix typo in npm start command 2025-06-02 05:52:31 +02:00
こやし
772e461c08 feat(gemini): add Gemini 2.5 Flash & Pro preview models (May 2025) 2025-06-02 00:30:18 +09:00
Dave
5c6018a0f9 docs: correct typo in npm start command 2025-06-01 06:35:16 +02:00
ItzCrazyKns
0b7989c3d3 feat(empty-chat): remove unused imports 2025-05-30 09:55:06 +05:30
ItzCrazyKns
8cfcc3e39c feat(chat): update margins and spacing 2025-05-30 09:52:36 +05:30
alckasoc
9eba4b7373 Merge branch 'master' of https://github.com/alckasoc/Perplexica 2025-05-29 18:27:00 -07:00
alckasoc
91306dc0c7 update gemini 2.5 pro key 2025-05-29 18:26:36 -07:00
26 changed files with 347 additions and 60 deletions

0
.assets/manifest.json Normal file
View File

2
.gitignore vendored
View File

@@ -37,3 +37,5 @@ Thumbs.db
# Db # Db
db.sqlite db.sqlite
/searxng /searxng
certificates

View File

@@ -16,7 +16,7 @@
<hr/> <hr/>
[![Discord](https://dcbadge.vercel.app/api/server/26aArMy8tT?style=flat&compact=true)](https://discord.gg/26aArMy8tT) [![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT)
![preview](.assets/perplexica-screenshot.png?) ![preview](.assets/perplexica-screenshot.png?)
@@ -90,6 +90,9 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
- `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
- `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
- `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
- `Gemini`: Your Gemini API key. **You only need to fill this if you wish to use Google's models**.
- `DEEPSEEK`: Your Deepseek API key. **Only needed if you want Deepseek models.**
- `AIMLAPI`: Your AI/ML API key. **Only needed if you want to use AI/ML API models and embeddings.**
**Note**: You can change these after starting Perplexica from the settings dialog. **Note**: You can change these after starting Perplexica from the settings dialog.
@@ -111,7 +114,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
2. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. 2. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file.
3. After populating the configuration run `npm i`. 3. After populating the configuration run `npm i`.
4. Install the dependencies and then execute `npm run build`. 4. Install the dependencies and then execute `npm run build`.
5. Finally, start the app by running `npm rum start` 5. Finally, start the app by running `npm run start`
**Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
@@ -132,7 +135,7 @@ If you're encountering an Ollama connection error, it is likely due to the backe
3. **Linux Users - Expose Ollama to Network:** 3. **Linux Users - Expose Ollama to Network:**
- Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0"`. Then restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux) - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0:11434"`. (Change the port number if you are using a different one.) Then reload the systemd manager configuration with `systemctl daemon-reload`, and restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux)
- Ensure that the port (default is 11434) is not blocked by your firewall. - Ensure that the port (default is 11434) is not blocked by your firewall.

View File

@@ -41,6 +41,6 @@ To update Perplexica to the latest version, follow these steps:
3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly. 3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
4. After populating the configuration run `npm i`. 4. After populating the configuration run `npm i`.
5. Install the dependencies and then execute `npm run build`. 5. Install the dependencies and then execute `npm run build`.
6. Finally, start the app by running `npm rum start` 6. Finally, start the app by running `npm run start`
--- ---

View File

@@ -1,6 +1,6 @@
{ {
"name": "perplexica-frontend", "name": "perplexica-frontend",
"version": "1.11.0-rc1", "version": "1.11.0-rc2",
"license": "MIT", "license": "MIT",
"author": "ItzCrazyKns", "author": "ItzCrazyKns",
"scripts": { "scripts": {

BIN
public/icon-100.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 916 B

BIN
public/icon-50.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 515 B

BIN
public/icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

BIN
public/screenshots/p1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 183 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 130 KiB

BIN
public/screenshots/p2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 627 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 202 KiB

View File

@@ -25,6 +25,9 @@ API_URL = "" # Ollama API URL - http://host.docker.internal:11434
[MODELS.DEEPSEEK] [MODELS.DEEPSEEK]
API_KEY = "" API_KEY = ""
[MODELS.AIMLAPI]
API_KEY = "" # Required to use AI/ML API chat and embedding models
[MODELS.LM_STUDIO] [MODELS.LM_STUDIO]
API_URL = "" # LM Studio API URL - http://host.docker.internal:1234 API_URL = "" # LM Studio API URL - http://host.docker.internal:1234

View File

@@ -8,6 +8,7 @@ import {
getOllamaApiEndpoint, getOllamaApiEndpoint,
getOpenaiApiKey, getOpenaiApiKey,
getDeepseekApiKey, getDeepseekApiKey,
getAimlApiKey,
getLMStudioApiEndpoint, getLMStudioApiEndpoint,
updateConfig, updateConfig,
} from '@/lib/config'; } from '@/lib/config';
@@ -57,6 +58,7 @@ export const GET = async (req: Request) => {
config['groqApiKey'] = getGroqApiKey(); config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey(); config['geminiApiKey'] = getGeminiApiKey();
config['deepseekApiKey'] = getDeepseekApiKey(); config['deepseekApiKey'] = getDeepseekApiKey();
config['aimlApiKey'] = getAimlApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl(); config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey(); config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName(); config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -95,6 +97,9 @@ export const POST = async (req: Request) => {
DEEPSEEK: { DEEPSEEK: {
API_KEY: config.deepseekApiKey, API_KEY: config.deepseekApiKey,
}, },
AIMLAPI: {
API_KEY: config.aimlApiKey,
},
LM_STUDIO: { LM_STUDIO: {
API_URL: config.lmStudioApiUrl, API_URL: config.lmStudioApiUrl,
}, },

View File

@@ -36,6 +36,7 @@ export const GET = async (req: Request) => {
{ {
engines: ['bing news'], engines: ['bing news'],
pageno: 1, pageno: 1,
language: 'en',
}, },
) )
).results; ).results;
@@ -49,7 +50,11 @@ export const GET = async (req: Request) => {
data = ( data = (
await searchSearxng( await searchSearxng(
`site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`, `site:${articleWebsites[Math.floor(Math.random() * articleWebsites.length)]} ${topics[Math.floor(Math.random() * topics.length)]}`,
{ engines: ['bing news'], pageno: 1 }, {
engines: ['bing news'],
pageno: 1,
language: 'en',
},
) )
).results; ).results;
} }

View File

@@ -1,6 +1,7 @@
export const POST = async (req: Request) => { export const POST = async (req: Request) => {
try { try {
const body: { lat: number; lng: number } = await req.json(); const body: { lat: number; lng: number; temperatureUnit: 'C' | 'F' } =
await req.json();
if (!body.lat || !body.lng) { if (!body.lat || !body.lng) {
return Response.json( return Response.json(
@@ -12,7 +13,7 @@ export const POST = async (req: Request) => {
} }
const res = await fetch( const res = await fetch(
`https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto`, `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${body.temperatureUnit === 'C' ? '' : '&temperature_unit=fahrenheit'}`,
); );
const data = await res.json(); const data = await res.json();
@@ -33,12 +34,14 @@ export const POST = async (req: Request) => {
humidity: number; humidity: number;
windSpeed: number; windSpeed: number;
icon: string; icon: string;
temperatureUnit: 'C' | 'F';
} = { } = {
temperature: data.current.temperature_2m, temperature: data.current.temperature_2m,
condition: '', condition: '',
humidity: data.current.relative_humidity_2m, humidity: data.current.relative_humidity_2m,
windSpeed: data.current.wind_speed_10m, windSpeed: data.current.wind_speed_10m,
icon: '', icon: '',
temperatureUnit: body.temperatureUnit,
}; };
const code = data.current.weather_code; const code = data.current.weather_code;

View File

@@ -11,3 +11,11 @@
display: none; display: none;
} }
} }
@media screen and (-webkit-min-device-pixel-ratio: 0) {
select,
textarea,
input {
font-size: 16px !important;
}
}

54
src/app/manifest.ts Normal file
View File

@@ -0,0 +1,54 @@
import type { MetadataRoute } from 'next';

/**
 * Next.js PWA web app manifest (served by Next as /manifest.webmanifest).
 *
 * Declares install metadata for Perplexica: app name, theme colors,
 * `display: 'standalone'` (required for iOS add-to-home-screen / PWA
 * behavior), store-style screenshots, and the icon set.
 *
 * @returns The manifest object consumed by Next.js' Metadata API.
 */
export default function manifest(): MetadataRoute.Manifest {
  return {
    name: 'Perplexica - Chat with the internet',
    short_name: 'Perplexica',
    description:
      'Perplexica is an AI powered chatbot that is connected to the internet.',
    start_url: '/',
    display: 'standalone',
    background_color: '#0a0a0a',
    theme_color: '#0a0a0a',
    screenshots: [
      // Wide (desktop) captures.
      {
        src: '/screenshots/p1.png',
        form_factor: 'wide',
        sizes: '2560x1600',
      },
      {
        src: '/screenshots/p2.png',
        form_factor: 'wide',
        sizes: '2560x1600',
      },
      // Narrow (mobile) captures.
      {
        src: '/screenshots/p1_small.png',
        form_factor: 'narrow',
        sizes: '828x1792',
      },
      {
        src: '/screenshots/p2_small.png',
        form_factor: 'narrow',
        sizes: '828x1792',
      },
    ],
    icons: [
      {
        src: '/icon-50.png',
        sizes: '50x50',
        // No `as const` needed here (it was only on this one entry):
        // the MetadataRoute.Manifest return annotation already narrows it.
        type: 'image/png',
      },
      {
        src: '/icon-100.png',
        sizes: '100x100',
        type: 'image/png',
      },
      {
        src: '/icon.png',
        sizes: '440x440',
        type: 'image/png',
        purpose: 'any',
      },
    ],
  };
}

View File

@@ -23,6 +23,7 @@ interface SettingsType {
ollamaApiUrl: string; ollamaApiUrl: string;
lmStudioApiUrl: string; lmStudioApiUrl: string;
deepseekApiKey: string; deepseekApiKey: string;
aimlApiKey: string;
customOpenaiApiKey: string; customOpenaiApiKey: string;
customOpenaiApiUrl: string; customOpenaiApiUrl: string;
customOpenaiModelName: string; customOpenaiModelName: string;
@@ -147,6 +148,7 @@ const Page = () => {
const [automaticImageSearch, setAutomaticImageSearch] = useState(false); const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false); const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
const [systemInstructions, setSystemInstructions] = useState<string>(''); const [systemInstructions, setSystemInstructions] = useState<string>('');
const [temperatureUnit, setTemperatureUnit] = useState<'C' | 'F'>('C');
const [savingStates, setSavingStates] = useState<Record<string, boolean>>({}); const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
useEffect(() => { useEffect(() => {
@@ -209,6 +211,8 @@ const Page = () => {
setSystemInstructions(localStorage.getItem('systemInstructions')!); setSystemInstructions(localStorage.getItem('systemInstructions')!);
setTemperatureUnit(localStorage.getItem('temperatureUnit')! as 'C' | 'F');
setIsLoading(false); setIsLoading(false);
}; };
@@ -367,6 +371,8 @@ const Page = () => {
localStorage.setItem('embeddingModel', value); localStorage.setItem('embeddingModel', value);
} else if (key === 'systemInstructions') { } else if (key === 'systemInstructions') {
localStorage.setItem('systemInstructions', value); localStorage.setItem('systemInstructions', value);
} else if (key === 'temperatureUnit') {
localStorage.setItem('temperatureUnit', value.toString());
} }
} catch (err) { } catch (err) {
console.error('Failed to save:', err); console.error('Failed to save:', err);
@@ -415,13 +421,35 @@ const Page = () => {
) : ( ) : (
config && ( config && (
<div className="flex flex-col space-y-6 pb-28 lg:pb-8"> <div className="flex flex-col space-y-6 pb-28 lg:pb-8">
<SettingsSection title="Appearance"> <SettingsSection title="Preferences">
<div className="flex flex-col space-y-1"> <div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm"> <p className="text-black/70 dark:text-white/70 text-sm">
Theme Theme
</p> </p>
<ThemeSwitcher /> <ThemeSwitcher />
</div> </div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Temperature Unit
</p>
<Select
value={temperatureUnit ?? undefined}
onChange={(e) => {
setTemperatureUnit(e.target.value as 'C' | 'F');
saveConfig('temperatureUnit', e.target.value);
}}
options={[
{
label: 'Celsius',
value: 'C',
},
{
label: 'Fahrenheit',
value: 'F',
},
]}
/>
</div>
</SettingsSection> </SettingsSection>
<SettingsSection title="Automatic Search"> <SettingsSection title="Automatic Search">
@@ -515,7 +543,7 @@ const Page = () => {
<SettingsSection title="System Instructions"> <SettingsSection title="System Instructions">
<div className="flex flex-col space-y-4"> <div className="flex flex-col space-y-4">
<Textarea <Textarea
value={systemInstructions} value={systemInstructions ?? undefined}
isSaving={savingStates['systemInstructions']} isSaving={savingStates['systemInstructions']}
onChange={(e) => { onChange={(e) => {
setSystemInstructions(e.target.value); setSystemInstructions(e.target.value);
@@ -862,6 +890,25 @@ const Page = () => {
/> />
</div> </div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
AI/ML API Key
</p>
<Input
type="text"
placeholder="AI/ML API Key"
value={config.aimlApiKey}
isSaving={savingStates['aimlApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
aimlApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('aimlApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1"> <div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm"> <p className="text-black/70 dark:text-white/70 text-sm">
LM Studio API URL LM Studio API URL

View File

@@ -82,14 +82,29 @@ const checkConfig = async (
) { ) {
if (!chatModel || !chatModelProvider) { if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders; const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider = chatModelProvider =
chatModelProvider || Object.keys(chatModelProviders)[0]; chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0]; chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
if (!chatModelProviders || Object.keys(chatModelProviders).length === 0)
return toast.error('No chat models available');
} }
if (!embeddingModel || !embeddingModelProvider) { if (!embeddingModel || !embeddingModelProvider) {
@@ -117,7 +132,8 @@ const checkConfig = async (
if ( if (
Object.keys(chatModelProviders).length > 0 && Object.keys(chatModelProviders).length > 0 &&
!chatModelProviders[chatModelProvider] (!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) { ) {
const chatModelProvidersKeys = Object.keys(chatModelProviders); const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider = chatModelProvider =
@@ -132,6 +148,16 @@ const checkConfig = async (
chatModelProvider && chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel] !chatModelProviders[chatModelProvider][chatModel]
) { ) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys( chatModel = Object.keys(
chatModelProviders[ chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0 Object.keys(chatModelProviders[chatModelProvider]).length > 0
@@ -139,6 +165,7 @@ const checkConfig = async (
: Object.keys(chatModelProviders)[0] : Object.keys(chatModelProviders)[0]
], ],
)[0]; )[0];
localStorage.setItem('chatModel', chatModel); localStorage.setItem('chatModel', chatModel);
} }

View File

@@ -1,6 +1,5 @@
import { Settings } from 'lucide-react'; import { Settings } from 'lucide-react';
import EmptyChatMessageInput from './EmptyChatMessageInput'; import EmptyChatMessageInput from './EmptyChatMessageInput';
import { useEffect, useState } from 'react';
import { File } from './ChatWindow'; import { File } from './ChatWindow';
import Link from 'next/link'; import Link from 'next/link';
import WeatherWidget from './WeatherWidget'; import WeatherWidget from './WeatherWidget';
@@ -34,7 +33,8 @@ const EmptyChat = ({
<Settings className="cursor-pointer lg:hidden" /> <Settings className="cursor-pointer lg:hidden" />
</Link> </Link>
</div> </div>
<div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8"> <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-4">
<div className="flex flex-col items-center justify-center w-full space-y-8">
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8"> <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
Research begins here. Research begins here.
</h2> </h2>
@@ -49,11 +49,12 @@ const EmptyChat = ({
files={files} files={files}
setFiles={setFiles} setFiles={setFiles}
/> />
</div>
<div className="flex flex-col w-full gap-4 mt-2 sm:flex-row sm:justify-center"> <div className="flex flex-col w-full gap-4 mt-2 sm:flex-row sm:justify-center">
<div className="flex-1 max-w-xs"> <div className="flex-1 w-full">
<WeatherWidget /> <WeatherWidget />
</div> </div>
<div className="flex-1 max-w-xs"> <div className="flex-1 w-full">
<NewsArticleWidget /> <NewsArticleWidget />
</div> </div>
</div> </div>

View File

@@ -9,7 +9,9 @@ const WeatherWidget = () => {
humidity: 0, humidity: 0,
windSpeed: 0, windSpeed: 0,
icon: '', icon: '',
temperatureUnit: 'C',
}); });
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
useEffect(() => { useEffect(() => {
@@ -31,30 +33,40 @@ const WeatherWidget = () => {
city: string; city: string;
}) => void, }) => void,
) => { ) => {
/*
// Geolocation doesn't give city so we'll country using ipapi for now
if (navigator.geolocation) { if (navigator.geolocation) {
const result = await navigator.permissions.query({ const result = await navigator.permissions.query({
name: 'geolocation', name: 'geolocation',
}) });
if (result.state === 'granted') { if (result.state === 'granted') {
navigator.geolocation.getCurrentPosition(position => { navigator.geolocation.getCurrentPosition(async (position) => {
const res = await fetch(
`https://api-bdc.io/data/reverse-geocode-client?latitude=${position.coords.latitude}&longitude=${position.coords.longitude}&localityLanguage=en`,
{
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
},
);
const data = await res.json();
callback({ callback({
latitude: position.coords.latitude, latitude: position.coords.latitude,
longitude: position.coords.longitude, longitude: position.coords.longitude,
}) city: data.locality,
}) });
});
} else if (result.state === 'prompt') { } else if (result.state === 'prompt') {
callback(await getApproxLocation()) callback(await getApproxLocation());
navigator.geolocation.getCurrentPosition(position => {}) navigator.geolocation.getCurrentPosition((position) => {});
} else if (result.state === 'denied') { } else if (result.state === 'denied') {
callback(await getApproxLocation()) callback(await getApproxLocation());
} }
} else { } else {
callback(await getApproxLocation())
} */
callback(await getApproxLocation()); callback(await getApproxLocation());
}
}; };
getLocation(async (location) => { getLocation(async (location) => {
@@ -63,6 +75,7 @@ const WeatherWidget = () => {
body: JSON.stringify({ body: JSON.stringify({
lat: location.latitude, lat: location.latitude,
lng: location.longitude, lng: location.longitude,
temperatureUnit: localStorage.getItem('temperatureUnit') ?? 'C',
}), }),
}); });
@@ -81,6 +94,7 @@ const WeatherWidget = () => {
humidity: data.humidity, humidity: data.humidity,
windSpeed: data.windSpeed, windSpeed: data.windSpeed,
icon: data.icon, icon: data.icon,
temperatureUnit: data.temperatureUnit,
}); });
setLoading(false); setLoading(false);
}); });
@@ -115,7 +129,7 @@ const WeatherWidget = () => {
className="h-10 w-auto" className="h-10 w-auto"
/> />
<span className="text-base font-semibold text-black dark:text-white"> <span className="text-base font-semibold text-black dark:text-white">
{data.temperature}°C {data.temperature}°{data.temperatureUnit}
</span> </span>
</div> </div>
<div className="flex flex-col justify-between flex-1 h-full py-1"> <div className="flex flex-col justify-between flex-1 h-full py-1">

View File

@@ -35,6 +35,9 @@ interface Config {
DEEPSEEK: { DEEPSEEK: {
API_KEY: string; API_KEY: string;
}; };
AIMLAPI: {
API_KEY: string;
};
LM_STUDIO: { LM_STUDIO: {
API_URL: string; API_URL: string;
}; };
@@ -85,6 +88,8 @@ export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY; export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
export const getCustomOpenaiApiKey = () => export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY; loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

View File

@@ -0,0 +1,94 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getAimlApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import axios from 'axios';

export const PROVIDER_INFO = {
  key: 'aimlapi',
  displayName: 'AI/ML API',
};

/** Shape of one entry in the `data` array returned by GET {API_URL}/models. */
interface AimlApiModel {
  id: string;
  name?: string;
  type?: string;
}

const API_URL = 'https://api.aimlapi.com';

/**
 * Fetches the model catalog from the AI/ML API.
 *
 * Shared by the chat and embedding loaders below so the request
 * (endpoint, headers, bearer auth) is defined in exactly one place.
 * Errors propagate to the caller, which handles them per loader.
 */
const fetchAimlApiModels = async (
  apiKey: string,
): Promise<AimlApiModel[]> => {
  const response = await axios.get(`${API_URL}/models`, {
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
  });
  return response.data.data as AimlApiModel[];
};

/**
 * Loads all AI/ML API models of type `chat-completion` as LangChain
 * chat models. Returns an empty record when no API key is configured
 * or the request fails, so callers degrade gracefully.
 */
export const loadAimlApiChatModels = async () => {
  const apiKey = getAimlApiKey();

  if (!apiKey) return {};

  try {
    const models = await fetchAimlApiModels(apiKey);
    const chatModels: Record<string, ChatModel> = {};

    models.forEach((model) => {
      if (model.type === 'chat-completion') {
        chatModels[model.id] = {
          displayName: model.name || model.id,
          model: new ChatOpenAI({
            openAIApiKey: apiKey,
            modelName: model.id,
            temperature: 0.7,
            configuration: {
              baseURL: API_URL,
            },
          }) as unknown as BaseChatModel,
        };
      }
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading AI/ML API models: ${err}`);
    return {};
  }
};

/**
 * Loads all AI/ML API models of type `embedding` as LangChain
 * embedding models. Same empty-record fallback behavior as the chat
 * loader above.
 */
export const loadAimlApiEmbeddingModels = async () => {
  const apiKey = getAimlApiKey();

  if (!apiKey) return {};

  try {
    const models = await fetchAimlApiModels(apiKey);
    const embeddingModels: Record<string, EmbeddingModel> = {};

    models.forEach((model) => {
      if (model.type === 'embedding') {
        embeddingModels[model.id] = {
          displayName: model.name || model.id,
          model: new OpenAIEmbeddings({
            openAIApiKey: apiKey,
            modelName: model.id,
            configuration: {
              baseURL: API_URL,
            },
          }) as unknown as Embeddings,
        };
      }
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading AI/ML API embeddings models: ${err}`);
    return {};
  }
};

View File

@@ -13,9 +13,17 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings'; import { Embeddings } from '@langchain/core/embeddings';
const geminiChatModels: Record<string, string>[] = [ const geminiChatModels: Record<string, string>[] = [
{
displayName: 'Gemini 2.5 Flash Preview 05-20',
key: 'gemini-2.5-flash-preview-05-20',
},
{
displayName: 'Gemini 2.5 Pro Preview',
key: 'gemini-2.5-pro-preview-05-06',
},
{ {
displayName: 'Gemini 2.5 Pro Experimental', displayName: 'Gemini 2.5 Pro Experimental',
key: 'gemini-2.5-pro-exp-03-25', key: 'gemini-2.5-pro-preview-05-06',
}, },
{ {
displayName: 'Gemini 2.0 Flash', displayName: 'Gemini 2.0 Flash',

View File

@@ -35,6 +35,11 @@ import {
loadDeepseekChatModels, loadDeepseekChatModels,
PROVIDER_INFO as DeepseekInfo, PROVIDER_INFO as DeepseekInfo,
} from './deepseek'; } from './deepseek';
import {
loadAimlApiChatModels,
loadAimlApiEmbeddingModels,
PROVIDER_INFO as AimlApiInfo,
} from './aimlapi';
import { import {
loadLMStudioChatModels, loadLMStudioChatModels,
loadLMStudioEmbeddingsModels, loadLMStudioEmbeddingsModels,
@@ -49,6 +54,7 @@ export const PROVIDER_METADATA = {
gemini: GeminiInfo, gemini: GeminiInfo,
transformers: TransformersInfo, transformers: TransformersInfo,
deepseek: DeepseekInfo, deepseek: DeepseekInfo,
aimlapi: AimlApiInfo,
lmstudio: LMStudioInfo, lmstudio: LMStudioInfo,
custom_openai: { custom_openai: {
key: 'custom_openai', key: 'custom_openai',
@@ -76,6 +82,7 @@ export const chatModelProviders: Record<
anthropic: loadAnthropicChatModels, anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels, gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels, deepseek: loadDeepseekChatModels,
aimlapi: loadAimlApiChatModels,
lmstudio: loadLMStudioChatModels, lmstudio: loadLMStudioChatModels,
}; };
@@ -87,6 +94,7 @@ export const embeddingModelProviders: Record<
ollama: loadOllamaEmbeddingModels, ollama: loadOllamaEmbeddingModels,
gemini: loadGeminiEmbeddingModels, gemini: loadGeminiEmbeddingModels,
transformers: loadTransformersEmbeddingsModels, transformers: loadTransformersEmbeddingsModels,
aimlapi: loadAimlApiEmbeddingModels,
lmstudio: loadLMStudioEmbeddingsModels, lmstudio: loadLMStudioEmbeddingsModels,
}; };