Compare commits

...

51 Commits

Author SHA1 Message Date
59ab10110a Merge branch 'master' of https://github.com/HadiCherkaoui/Perplexica into pr/658 2025-03-02 15:35:14 +05:30
10f9cd2f79 feat(app): lint & beautify 2025-03-02 15:35:08 +05:30
82e1dd73b0 add config to ui 2025-03-02 11:00:56 +01:00
728b499281 Merge branch 'ItzCrazyKns:master' into master 2025-03-01 18:16:08 +01:00
5a4dafc753 Update yarn.lock 2025-03-01 18:12:40 +01:00
4ac99786f0 Update yarn.lock 2025-03-01 18:11:34 +01:00
1224281278 Update yarn.lock 2025-03-01 18:09:57 +01:00
3daae29a5d Update package.json 2025-03-01 18:05:28 +01:00
50bcaa13f2 Revert "Delete package.json"
This reverts commit 31e4abf068.
2025-03-01 17:58:54 +01:00
31e4abf068 Delete package.json 2025-03-01 17:57:40 +01:00
fd6e701cf0 Delete docs/installation/DEVELOPMENT.md 2025-03-01 17:49:59 +01:00
89880a2555 Delete .dev directory 2025-03-01 17:48:54 +01:00
07776d8699 feat(config): remove key cloak config functions 2025-03-01 22:13:38 +05:30
a24992a3db Merge pull request #655 from ShortCipher5/patch-1
chore: Add Sealos 1-click deployment
2025-03-01 21:56:01 +05:30
32fb6ac131 Update config.ts 2025-03-01 14:16:39 +01:00
99137d95e7 Update docker-compose.yaml 2025-03-01 14:15:44 +01:00
490a8db538 Update sample.config.toml 2025-03-01 14:15:21 +01:00
aba702c51b make it possible to configure multiple engines 2025-02-28 15:50:59 +01:00
89a6e7fbb1 Merge pull request #11 from HadiCherkaoui/add-bing
add bing
2025-02-28 15:17:40 +01:00
f19d2e3a97 add bing 2025-02-28 15:16:54 +01:00
4a7ca8fc68 Add keycloak config params to sample config 2025-02-28 15:07:50 +01:00
3d642f2539 fix sample messup 2025-02-28 14:58:16 +01:00
aa91d3bc60 Add config params for keycloak 2025-02-28 14:56:33 +01:00
93c5ed46f6 Merge pull request #9 from HadiCherkaoui/add-bing
Added brave, yacy and changed back the default search engine to searxng
2025-02-28 14:49:58 +01:00
af4b97b766 add yacy 2025-02-28 14:46:24 +01:00
ca86a7e358 add brave as a search engine 2025-02-28 13:11:00 +01:00
99351fc2a6 Added multiple search backends
Add support for multiple search engines/google as a search engine
2025-02-28 09:33:50 +01:00
7a816efc04 fix google pse
i tested everything, also tested if invalid api key and such and regression tested searxng which still works
2025-02-28 09:13:11 +01:00
d584067bb1 Update README.md 2025-02-27 23:26:45 -08:00
4d41243108 added multi search engine support (didnt test) WIP 2025-02-27 21:32:26 +01:00
6c218b5fee move searxng.ts and update imports 2025-02-27 18:24:54 +01:00
1c1f31e23a Update .gitignore 2025-02-27 18:21:36 +01:00
5b15bcfe17 Add config for multiple search engines 2025-02-27 18:21:00 +01:00
df4350f966 Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica 2025-02-26 10:40:34 +05:30
652ca2fdf4 Merge pull request #649 from QuietlyChan/fix/light-theme-ui-bug
fix(ui): improve dark mode text color for attachment buttons
2025-02-26 10:36:41 +05:30
216576128d fix(ui): update attachment text color for light and dark modes 2025-02-25 19:26:58 +08:00
bb3f180583 fix(ui): improve dark mode text color for attachment buttons 2025-02-25 17:26:33 +08:00
4d24d73161 Merge pull request #631 from user1007017/patch-1
Update README.md grammatical error
2025-02-20 10:37:33 +05:30
2e166c217b fix(MessageBox): break too long message title 2025-02-19 10:34:51 +08:00
4c73caadf6 feat(custom-openai): save live changes 2025-02-17 16:24:41 +05:30
5f0b87f4a9 Update README.md 2025-02-15 19:06:46 +01:00
115e6b2a71 Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica 2025-02-15 12:52:30 +05:30
a5c79c92ed feat(settings): add embedding provider settings 2025-02-15 12:52:27 +05:30
db3cea446e Update UPDATING.md 2025-02-15 12:33:43 +05:30
8e683d266a feat(package): bump version 2025-02-15 12:12:57 +05:30
e9ab425cee feat(sample-config): remove unused field 2025-02-15 11:34:14 +05:30
811c0c6fe1 Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica 2025-02-15 11:31:20 +05:30
cab1aa705c feat(settings): add new settings page 2025-02-15 11:31:08 +05:30
5cbc512322 feat(app): add auto video & image search 2025-02-15 11:29:59 +05:30
41d056e755 feat(handlers): use new custom openai 2025-02-15 11:29:08 +05:30
07dc7d7649 feat(config): update config & custom openai 2025-02-15 11:26:38 +05:30
37 changed files with 2223 additions and 805 deletions

.gitignore (vendored): 4 lines changed
View File

@@ -2,6 +2,7 @@
 node_modules/
 npm-debug.log
 yarn-error.log
+package-lock.json

 # Build output
 /.next/
@@ -37,3 +38,6 @@ Thumbs.db
 # Db
 db.sqlite
 /searxng
+
+# Dev
+docker-compose-dev.yaml

View File

@@ -2,7 +2,6 @@
 [![Discord](https://dcbadge.vercel.app/api/server/26aArMy8tT?style=flat&compact=true)](https://discord.gg/26aArMy8tT)
 ![preview](.assets/perplexica-screenshot.png?)

 ## Table of Contents <!-- omit in toc -->
@@ -44,7 +43,7 @@ Want to know more about its architecture and how it works? You can read it [here
 - **Normal Mode:** Processes your query and performs a web search.
 - **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
   - **All Mode:** Searches the entire web to find the best results.
-  - **Writing Assistant Mode:** Helpful for writing tasks that does not require searching the web.
+  - **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
   - **Academic Search Mode:** Finds articles and papers, ideal for academic research.
   - **YouTube Search Mode:** Finds YouTube videos based on the search query.
   - **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.
@@ -143,6 +142,7 @@ You can access Perplexica over your home network by following our networking gui
 ## One-Click Deployment

+[![Deploy to Sealos](https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg)](https://usw.sealos.io/?openapp=system-template%3FtemplateName%3Dperplexica)
 [![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267)

 ## Upcoming Features

View File

@@ -4,7 +4,7 @@ services:
     volumes:
       - ./searxng:/etc/searxng:rw
     ports:
-      - 4000:8080
+      - '4000:8080'
     networks:
       - perplexica-network
     restart: unless-stopped
@@ -19,7 +19,7 @@ services:
     depends_on:
      - searxng
     ports:
-      - 3001:3001
+      - '3001:3001'
     volumes:
      - backend-dbstore:/home/perplexica/data
      - uploads:/home/perplexica/uploads
@@ -41,7 +41,7 @@ services:
     depends_on:
      - perplexica-backend
     ports:
-      - 3000:3000
+      - '3000:3000'
     networks:
      - perplexica-network
     restart: unless-stopped

View File

@@ -7,34 +7,43 @@ To update Perplexica to the latest version, follow these steps:

 1. Clone the latest version of Perplexica from GitHub:

    ```bash
    git clone https://github.com/ItzCrazyKns/Perplexica.git
    ```

-2. Navigate to the Project Directory.
-3. Pull latest images from registry.
+2. Navigate to the project directory.
+
+3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+
+4. Pull the latest images from the registry.

    ```bash
    docker compose pull
    ```

-4. Update and Recreate containers.
+5. Update and recreate the containers.

    ```bash
    docker compose up -d
    ```

-5. Once the command completes running go to http://localhost:3000 and verify the latest changes.
+6. Once the command completes, go to http://localhost:3000 and verify the latest changes.

-## For non Docker users
+## For non-Docker users

 1. Clone the latest version of Perplexica from GitHub:

    ```bash
    git clone https://github.com/ItzCrazyKns/Perplexica.git
    ```

-2. Navigate to the Project Directory
-3. Execute `npm i` in both the `ui` folder and the root directory.
-4. Once packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
-5. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
+2. Navigate to the project directory.
+
+3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+
+4. Execute `npm i` in both the `ui` folder and the root directory.
+
+5. Once the packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
+
+6. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
+
+---
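For reference, the non-Docker steps above can be run end to end roughly as follows. This is only a sketch: it assumes a fresh clone into `./Perplexica` and that `config.toml` has already been reconciled with the new `sample.config.toml`.

```bash
# Sketch of the non-Docker update flow described in the updated UPDATING.md.
git clone https://github.com/ItzCrazyKns/Perplexica.git
cd Perplexica

# Install dependencies and build the backend (repository root) and the frontend (ui).
npm i && npm run build
cd ui && npm i && npm run build && cd ..

# Start the backend here, and the frontend from the ui folder in a second terminal.
npm run start
# cd ui && npm run start
```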

View File

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.10.0-rc2",
+  "version": "1.10.0-rc3",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -30,8 +30,8 @@
     "@iarna/toml": "^2.2.5",
     "@langchain/anthropic": "^0.2.3",
     "@langchain/community": "^0.2.16",
-    "@langchain/openai": "^0.0.25",
     "@langchain/google-genai": "^0.0.23",
+    "@langchain/openai": "^0.0.25",
     "@xenova/transformers": "^2.17.1",
     "axios": "^1.6.8",
     "better-sqlite3": "^11.0.0",

View File

@@ -3,12 +3,43 @@ PORT = 3001 # Port to run the server on
 SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

-[API_KEYS]
-OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
-GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
-ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
-GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef
-
-[API_ENDPOINTS]
-SEARXNG = "http://localhost:32768" # SearxNG API URL
-OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
+[SEARCH_ENGINE_BACKENDS] # "google" | "searxng" | "bing" | "brave" | "yacy"
+SEARCH = "searxng"
+IMAGE = "searxng"
+VIDEO = "searxng"
+NEWS = "searxng"
+
+[MODELS.OPENAI]
+API_KEY = ""
+
+[MODELS.GROQ]
+API_KEY = ""
+
+[MODELS.ANTHROPIC]
+API_KEY = ""
+
+[MODELS.GEMINI]
+API_KEY = ""
+
+[MODELS.CUSTOM_OPENAI]
+API_KEY = ""
+API_URL = ""
+
+[MODELS.OLLAMA]
+API_URL = "" # Ollama API URL - http://host.docker.internal:11434
+
+[SEARCH_ENGINES.GOOGLE]
+API_KEY = ""
+CSE_ID = ""
+
+[SEARCH_ENGINES.SEARXNG]
+ENDPOINT = ""
+
+[SEARCH_ENGINES.BING]
+SUBSCRIPTION_KEY = ""
+
+[SEARCH_ENGINES.BRAVE]
+API_KEY = ""
+
+[SEARCH_ENGINES.YACY]
+ENDPOINT = ""
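As an illustration of the new layout, a deployment that uses Google Programmable Search for general results but keeps SearXNG for news might look like the sketch below. The key values are placeholders, not real credentials.

```toml
[SEARCH_ENGINE_BACKENDS]
SEARCH = "google"
IMAGE = "google"
VIDEO = "google"
NEWS = "searxng"

[SEARCH_ENGINES.GOOGLE]
API_KEY = "your-google-api-key"      # placeholder
CSE_ID = "your-cse-id"               # placeholder

[SEARCH_ENGINES.SEARXNG]
ENDPOINT = "http://localhost:32768"  # placeholder
```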

View File

@@ -15,3 +15,5 @@ server:
 engines:
   - name: wolframalpha
     disabled: false
+  - name: qwant
+    disabled: true

View File

@@ -7,7 +7,12 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../lib/searxng';
+import { searchSearxng } from '../lib/searchEngines/searxng';
+import { searchGooglePSE } from '../lib/searchEngines/google_pse';
+import { searchBraveAPI } from '../lib/searchEngines/brave';
+import { searchYaCy } from '../lib/searchEngines/yacy';
+import { searchBingAPI } from '../lib/searchEngines/bing';
+import { getImageSearchEngineBackend } from '../config';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

 const imageSearchChainPrompt = `
@@ -36,6 +41,103 @@ type ImageSearchChainInput = {
   query: string;
 };
async function performImageSearch(query: string) {
const searchEngine = getImageSearchEngineBackend();
let images = [];
switch (searchEngine) {
case 'google': {
const googleResult = await searchGooglePSE(query);
images = googleResult.results
.map((result) => {
if (result.img_src && result.url && result.title) {
return {
img_src: result.img_src,
url: result.url,
title: result.title,
source: result.displayLink,
};
}
})
.filter(Boolean);
break;
}
case 'searxng': {
const searxResult = await searchSearxng(query, {
engines: ['google images', 'bing images'],
pageno: 1,
});
searxResult.results.forEach((result) => {
if (result.img_src && result.url && result.title) {
images.push({
img_src: result.img_src,
url: result.url,
title: result.title,
});
}
});
break;
}
case 'brave': {
const braveResult = await searchBraveAPI(query);
images = braveResult.results
.map((result) => {
if (result.img_src && result.url && result.title) {
return {
img_src: result.img_src,
url: result.url,
title: result.title,
source: result.url,
};
}
})
.filter(Boolean);
break;
}
case 'yacy': {
const yacyResult = await searchYaCy(query);
images = yacyResult.results
.map((result) => {
if (result.img_src && result.url && result.title) {
return {
img_src: result.img_src,
url: result.url,
title: result.title,
source: result.url,
};
}
})
.filter(Boolean);
break;
}
case 'bing': {
const bingResult = await searchBingAPI(query);
images = bingResult.results
.map((result) => {
if (result.img_src && result.url && result.title) {
return {
img_src: result.img_src,
url: result.url,
title: result.title,
source: result.url,
};
}
})
.filter(Boolean);
break;
}
default:
throw new Error(`Unknown search engine ${searchEngine}`);
}
return images;
}
 const strParser = new StringOutputParser();

 const createImageSearchChain = (llm: BaseChatModel) => {
@@ -52,22 +154,7 @@ const createImageSearchChain = (llm: BaseChatModel) => {
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
-      const res = await searchSearxng(input, {
-        engines: ['bing images', 'google images'],
-      });
-
-      const images = [];
-
-      res.results.forEach((result) => {
-        if (result.img_src && result.url && result.title) {
-          images.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-          });
-        }
-      });
-
+      const images = await performImageSearch(input);
       return images.slice(0, 10);
     }),
   ]);

View File

@@ -7,7 +7,11 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../lib/searxng';
+import { searchSearxng } from '../lib/searchEngines/searxng';
+import { searchGooglePSE } from '../lib/searchEngines/google_pse';
+import { searchBraveAPI } from '../lib/searchEngines/brave';
+import { searchBingAPI } from '../lib/searchEngines/bing';
+import { getVideoSearchEngineBackend } from '../config';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

 const VideoSearchChainPrompt = `
@@ -38,27 +42,36 @@ type VideoSearchChainInput = {
 const strParser = new StringOutputParser();

-const createVideoSearchChain = (llm: BaseChatModel) => {
-  return RunnableSequence.from([
-    RunnableMap.from({
-      chat_history: (input: VideoSearchChainInput) => {
-        return formatChatHistoryAsString(input.chat_history);
-      },
-      query: (input: VideoSearchChainInput) => {
-        return input.query;
-      },
-    }),
-    PromptTemplate.fromTemplate(VideoSearchChainPrompt),
-    llm,
-    strParser,
-    RunnableLambda.from(async (input: string) => {
-      const res = await searchSearxng(input, {
-        engines: ['youtube'],
-      });
-
-      const videos = [];
-
-      res.results.forEach((result) => {
+async function performVideoSearch(query: string) {
+  const searchEngine = getVideoSearchEngineBackend();
+  const youtubeQuery = `${query} site:youtube.com`;
+  let videos = [];
+
+  switch (searchEngine) {
+    case 'google': {
+      const googleResult = await searchGooglePSE(youtubeQuery);
+      googleResult.results.forEach((result) => {
+        // Use .results instead of .originalres
+        if (result.img_src && result.url && result.title) {
+          const videoId = new URL(result.url).searchParams.get('v');
+          videos.push({
+            img_src: result.img_src,
+            url: result.url,
+            title: result.title,
+            iframe_src: videoId
+              ? `https://www.youtube.com/embed/${videoId}`
+              : null,
+          });
+        }
+      });
+      break;
+    }
+    case 'searxng': {
+      const searxResult = await searchSearxng(query, {
+        engines: ['youtube'],
+      });
+      searxResult.results.forEach((result) => {
         if (
           result.thumbnail &&
           result.url &&
@@ -73,7 +86,73 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
           });
         }
       });
break;
}
case 'brave': {
const braveResult = await searchBraveAPI(youtubeQuery);
braveResult.results.forEach((result) => {
if (result.img_src && result.url && result.title) {
const videoId = new URL(result.url).searchParams.get('v');
videos.push({
img_src: result.img_src,
url: result.url,
title: result.title,
iframe_src: videoId
? `https://www.youtube.com/embed/${videoId}`
: null,
});
}
});
break;
}
case 'yacy': {
console.log('Not available for yacy');
videos = [];
break;
}
case 'bing': {
const bingResult = await searchBingAPI(youtubeQuery);
bingResult.results.forEach((result) => {
if (result.img_src && result.url && result.title) {
const videoId = new URL(result.url).searchParams.get('v');
videos.push({
img_src: result.img_src,
url: result.url,
title: result.title,
iframe_src: videoId
? `https://www.youtube.com/embed/${videoId}`
: null,
});
}
});
break;
}
default:
throw new Error(`Unknown search engine ${searchEngine}`);
}
return videos;
}
const createVideoSearchChain = (llm: BaseChatModel) => {
return RunnableSequence.from([
RunnableMap.from({
chat_history: (input: VideoSearchChainInput) => {
return formatChatHistoryAsString(input.chat_history);
},
query: (input: VideoSearchChainInput) => {
return input.query;
},
}),
PromptTemplate.fromTemplate(VideoSearchChainPrompt),
llm,
strParser,
    RunnableLambda.from(async (input: string) => {
      const videos = await performVideoSearch(input);
      return videos.slice(0, 10);
    }),
  ]);

View File

@@ -10,15 +10,51 @@ interface Config {
     SIMILARITY_MEASURE: string;
     KEEP_ALIVE: string;
   };
-  API_KEYS: {
-    OPENAI: string;
-    GROQ: string;
-    ANTHROPIC: string;
-    GEMINI: string;
-  };
-  API_ENDPOINTS: {
-    SEARXNG: string;
-    OLLAMA: string;
+  SEARCH_ENGINE_BACKENDS: {
+    SEARCH: string;
+    IMAGE: string;
+    VIDEO: string;
+    NEWS: string;
+  };
+  MODELS: {
+    OPENAI: {
+      API_KEY: string;
+    };
+    GROQ: {
+      API_KEY: string;
+    };
+    ANTHROPIC: {
+      API_KEY: string;
+    };
+    GEMINI: {
+      API_KEY: string;
+    };
+    OLLAMA: {
+      API_URL: string;
+    };
+    CUSTOM_OPENAI: {
+      API_URL: string;
+      API_KEY: string;
+      MODEL_NAME: string;
+    };
+  };
+  SEARCH_ENGINES: {
+    GOOGLE: {
+      API_KEY: string;
+      CSE_ID: string;
+    };
+    SEARXNG: {
+      ENDPOINT: string;
+    };
+    BING: {
+      SUBSCRIPTION_KEY: string;
+    };
+    BRAVE: {
+      API_KEY: string;
+    };
+    YACY: {
+      ENDPOINT: string;
+    };
   };
 }
@@ -38,42 +74,89 @@ export const getSimilarityMeasure = () =>
 export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;

-export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
-
-export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
-
-export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
-
-export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
+export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
+
+export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
+
+export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
+
+export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
+
+export const getSearchEngineBackend = () =>
+  loadConfig().SEARCH_ENGINE_BACKENDS.SEARCH;
+
+export const getImageSearchEngineBackend = () =>
+  loadConfig().SEARCH_ENGINE_BACKENDS.IMAGE || getSearchEngineBackend();
+
+export const getVideoSearchEngineBackend = () =>
+  loadConfig().SEARCH_ENGINE_BACKENDS.VIDEO || getSearchEngineBackend();
+
+export const getNewsSearchEngineBackend = () =>
+  loadConfig().SEARCH_ENGINE_BACKENDS.NEWS || getSearchEngineBackend();
+
+export const getGoogleApiKey = () => loadConfig().SEARCH_ENGINES.GOOGLE.API_KEY;
+
+export const getGoogleCseId = () => loadConfig().SEARCH_ENGINES.GOOGLE.CSE_ID;
+
+export const getBraveApiKey = () => loadConfig().SEARCH_ENGINES.BRAVE.API_KEY;
+
+export const getBingSubscriptionKey = () =>
+  loadConfig().SEARCH_ENGINES.BING.SUBSCRIPTION_KEY;
+
+export const getYacyJsonEndpoint = () =>
+  loadConfig().SEARCH_ENGINES.YACY.ENDPOINT;

 export const getSearxngApiEndpoint = () =>
-  process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
+  process.env.SEARXNG_API_URL || loadConfig().SEARCH_ENGINES.SEARXNG.ENDPOINT;

-export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
+export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

-export const updateConfig = (config: RecursivePartial<Config>) => {
-  const currentConfig = loadConfig();
-
-  for (const key in currentConfig) {
-    if (!config[key]) config[key] = {};
-
-    if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) {
-      for (const nestedKey in currentConfig[key]) {
-        if (
-          !config[key][nestedKey] &&
-          currentConfig[key][nestedKey] &&
-          config[key][nestedKey] !== ''
-        ) {
-          config[key][nestedKey] = currentConfig[key][nestedKey];
-        }
-      }
-    } else if (currentConfig[key] && config[key] !== '') {
-      config[key] = currentConfig[key];
-    }
-  }
+export const getCustomOpenaiApiKey = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
+
+export const getCustomOpenaiApiUrl = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
+
+export const getCustomOpenaiModelName = () =>
+  loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
+
+const mergeConfigs = (current: any, update: any): any => {
+  if (update === null || update === undefined) {
+    return current;
+  }
+
+  if (typeof current !== 'object' || current === null) {
+    return update;
+  }
+
+  const result = { ...current };
+
+  for (const key in update) {
+    if (Object.prototype.hasOwnProperty.call(update, key)) {
+      const updateValue = update[key];
+
+      if (
+        typeof updateValue === 'object' &&
+        updateValue !== null &&
+        typeof result[key] === 'object' &&
+        result[key] !== null
+      ) {
+        result[key] = mergeConfigs(result[key], updateValue);
+      } else if (updateValue !== undefined) {
+        result[key] = updateValue;
+      }
+    }
+  }
+
+  return result;
+};
+
+export const updateConfig = (config: RecursivePartial<Config>) => {
+  const currentConfig = loadConfig();
+  const mergedConfig = mergeConfigs(currentConfig, config);

   fs.writeFileSync(
     path.join(__dirname, `../${configFileName}`),
-    toml.stringify(config),
+    toml.stringify(mergedConfig),
   );
 };
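The practical effect of the recursive merge is that a partial update only overwrites the keys it names; every other section of the loaded config survives. A rough sketch of the expected behaviour (standalone illustration, not the actual module; values are placeholders):

```typescript
// Hypothetical inputs to the merge described above.
const current = {
  MODELS: {
    OPENAI: { API_KEY: 'old-key' },
    OLLAMA: { API_URL: 'http://localhost:11434' },
  },
  SEARCH_ENGINE_BACKENDS: { SEARCH: 'searxng' },
};

const update = { MODELS: { OPENAI: { API_KEY: 'new-key' } } };

// mergeConfigs(current, update) is expected to yield:
// MODELS.OPENAI.API_KEY          -> 'new-key'                 (overwritten)
// MODELS.OLLAMA.API_URL          -> 'http://localhost:11434'  (preserved)
// SEARCH_ENGINE_BACKENDS.SEARCH  -> 'searxng'                 (preserved)
```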

View File

@@ -4,6 +4,12 @@ import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadTransformersEmbeddingsModels } from './transformers';
 import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../../config';
+import { ChatOpenAI } from '@langchain/openai';

 const chatModelProviders = {
   openai: loadOpenAIChatModels,
@@ -30,7 +36,27 @@ export const getAvailableChatModelProviders = async () => {
     }
   }

-  models['custom_openai'] = {};
+  const customOpenAiApiKey = getCustomOpenaiApiKey();
+  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
+  const customOpenAiModelName = getCustomOpenaiModelName();
+
+  models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
? {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
temperature: 0.7,
configuration: {
baseURL: customOpenAiApiUrl,
},
}),
},
}
: {}),
};
  return models;
};

View File

@@ -0,0 +1,105 @@
import axios from 'axios';
import { getBingSubscriptionKey } from '../../config';
interface BingAPISearchResult {
_type: string;
name: string;
url: string;
displayUrl: string;
snippet?: string;
dateLastCrawled?: string;
thumbnailUrl?: string;
contentUrl?: string;
hostPageUrl?: string;
width?: number;
height?: number;
accentColor?: string;
contentSize?: string;
datePublished?: string;
encodingFormat?: string;
hostPageDisplayUrl?: string;
id?: string;
isLicensed?: boolean;
isFamilyFriendly?: boolean;
language?: string;
mediaUrl?: string;
motionThumbnailUrl?: string;
publisher?: string;
viewCount?: number;
webSearchUrl?: string;
primaryImageOfPage?: {
thumbnailUrl?: string;
width?: number;
height?: number;
};
video?: {
allowHttpsEmbed?: boolean;
embedHtml?: string;
allowMobileEmbed?: boolean;
viewCount?: number;
duration?: string;
};
image?: {
thumbnail?: {
contentUrl?: string;
width?: number;
height?: number;
};
imageInsightsToken?: string;
imageId?: string;
};
}
export const searchBingAPI = async (query: string) => {
try {
const bingApiKey = await getBingSubscriptionKey();
const url = new URL(`https://api.cognitive.microsoft.com/bing/v7.0/search`);
url.searchParams.append('q', query);
url.searchParams.append('responseFilter', 'Webpages,Images,Videos');
const res = await axios.get(url.toString(), {
headers: {
'Ocp-Apim-Subscription-Key': bingApiKey,
Accept: 'application/json',
},
});
if (res.data.error) {
throw new Error(`Bing API Error: ${res.data.error.message}`);
}
const originalres = res.data;
// Extract web, image, and video results
const webResults = originalres.webPages?.value || [];
const imageResults = originalres.images?.value || [];
const videoResults = originalres.videos?.value || [];
const results = webResults.map((item: BingAPISearchResult) => ({
title: item.name,
url: item.url,
content: item.snippet,
img_src:
item.primaryImageOfPage?.thumbnailUrl ||
imageResults.find((img: any) => img.hostPageUrl === item.url)
?.thumbnailUrl ||
videoResults.find((vid: any) => vid.hostPageUrl === item.url)
?.thumbnailUrl,
...(item.video && {
videoData: {
duration: item.video.duration,
embedUrl: item.video.embedHtml?.match(/src="(.*?)"/)?.[1],
},
publisher: item.publisher,
datePublished: item.datePublished,
}),
}));
return { results, originalres };
} catch (error) {
const errorMessage = error.response?.data
? JSON.stringify(error.response.data, null, 2)
: error.message || 'Unknown error';
throw new Error(`Bing API Error: ${errorMessage}`);
}
};

View File

@@ -0,0 +1,102 @@
import axios from 'axios';
import { getBraveApiKey } from '../../config';
interface BraveSearchResult {
title: string;
url: string;
content?: string;
img_src?: string;
age?: string;
family_friendly?: boolean;
language?: string;
video?: {
embedUrl?: string;
duration?: string;
};
rating?: {
value: number;
scale: number;
};
products?: Array<{
name: string;
price?: string;
}>;
recipe?: {
ingredients?: string[];
cookTime?: string;
};
meta?: {
fetched?: string;
lastCrawled?: string;
};
}
export const searchBraveAPI = async (
query: string,
numResults: number = 20,
): Promise<{ results: BraveSearchResult[]; originalres: any }> => {
try {
const braveApiKey = await getBraveApiKey();
const url = new URL(`https://api.search.brave.com/res/v1/web/search`);
url.searchParams.append('q', query);
url.searchParams.append('count', numResults.toString());
const res = await axios.get(url.toString(), {
headers: {
'X-Subscription-Token': braveApiKey,
Accept: 'application/json',
},
});
if (res.data.error) {
throw new Error(`Brave API Error: ${res.data.error.message}`);
}
const originalres = res.data;
const webResults = originalres.web?.results || [];
const results: BraveSearchResult[] = webResults.map((item: any) => ({
title: item.title,
url: item.url,
content: item.description,
img_src: item.thumbnail?.src || item.deep_results?.images?.[0]?.src,
age: item.age,
family_friendly: item.family_friendly,
language: item.language,
video: item.video
? {
embedUrl: item.video.embed_url,
duration: item.video.duration,
}
: undefined,
rating: item.rating
? {
value: item.rating.value,
scale: item.rating.scale_max,
}
: undefined,
products: item.deep_results?.product_cluster?.map((p: any) => ({
name: p.name,
price: p.price,
})),
recipe: item.recipe
? {
ingredients: item.recipe.ingredients,
cookTime: item.recipe.cook_time,
}
: undefined,
meta: {
fetched: item.meta?.fetched,
lastCrawled: item.meta?.last_crawled,
},
}));
return { results, originalres };
} catch (error) {
const errorMessage = error.response?.data
? JSON.stringify(error.response.data, null, 2)
: error.message || 'Unknown error';
throw new Error(`Brave API Error: ${errorMessage}`);
}
};

View File

@@ -0,0 +1,85 @@
import axios from 'axios';
import { getGoogleApiKey, getGoogleCseId } from '../../config';
interface GooglePSESearchResult {
kind: string;
title: string;
htmlTitle: string;
link: string;
displayLink: string;
snippet?: string;
htmlSnippet?: string;
cacheId?: string;
formattedUrl: string;
htmlFormattedUrl: string;
pagemap?: {
videoobject: any;
cse_thumbnail?: Array<{
src: string;
width: string;
height: string;
}>;
metatags?: Array<{
[key: string]: string;
author?: string;
}>;
cse_image?: Array<{
src: string;
}>;
};
fileFormat?: string;
image?: {
contextLink: string;
thumbnailLink: string;
};
mime?: string;
labels?: Array<{
name: string;
displayName: string;
}>;
}
export const searchGooglePSE = async (query: string) => {
try {
const [googleApiKey, googleCseID] = await Promise.all([
getGoogleApiKey(),
getGoogleCseId(),
]);
const url = new URL(`https://www.googleapis.com/customsearch/v1`);
url.searchParams.append('q', query);
url.searchParams.append('cx', googleCseID);
url.searchParams.append('key', googleApiKey);
const res = await axios.get(url.toString());
if (res.data.error) {
throw new Error(`Google PSE Error: ${res.data.error.message}`);
}
const originalres = res.data.items;
const results = originalres.map((item: GooglePSESearchResult) => ({
title: item.title,
url: item.link,
content: item.snippet,
img_src:
item.pagemap?.cse_image?.[0]?.src ||
item.pagemap?.cse_thumbnail?.[0]?.src ||
item.image?.thumbnailLink,
...(item.pagemap?.videoobject?.[0] && {
videoData: {
duration: item.pagemap.videoobject[0].duration,
embedUrl: item.pagemap.videoobject[0].embedurl,
},
}),
}));
return { results, originalres };
} catch (error) {
const errorMessage = error.response?.data
? JSON.stringify(error.response.data, null, 2)
: error.message || 'Unknown error';
throw new Error(`Google PSE Error: ${errorMessage}`);
}
};

View File

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getSearxngApiEndpoint } from '../config';
+import { getSearxngApiEndpoint } from '../../config';

 interface SearxngSearchOptions {
   categories?: string[];

View File

@@ -0,0 +1,79 @@
import axios from 'axios';
import { getYacyJsonEndpoint } from '../../config';
interface YaCySearchResult {
channels: {
title: string;
description: string;
link: string;
image: {
url: string;
title: string;
link: string;
};
startIndex: string;
itemsPerPage: string;
searchTerms: string;
items: {
title: string;
link: string;
code: string;
description: string;
pubDate: string;
image?: string;
size: string;
sizename: string;
guid: string;
faviconUrl: string;
host: string;
path: string;
file: string;
urlhash: string;
ranking: string;
}[];
navigation: {
facetname: string;
displayname: string;
type: string;
min: string;
max: string;
mean: string;
elements: {
name: string;
count: string;
modifier: string;
url: string;
}[];
}[];
}[];
}
export const searchYaCy = async (query: string, numResults: number = 20) => {
try {
const yacyBaseUrl = getYacyJsonEndpoint();
const url = new URL(`${yacyBaseUrl}/yacysearch.json`);
url.searchParams.append('query', query);
url.searchParams.append('count', numResults.toString());
const res = await axios.get(url.toString());
const originalres = res.data as YaCySearchResult;
const results = originalres.channels[0].items.map((item) => ({
title: item.title,
url: item.link,
content: item.description,
img_src: item.image || null,
pubDate: item.pubDate,
host: item.host,
}));
return { results, originalres };
} catch (error) {
const errorMessage = error.response?.data
? JSON.stringify(error.response.data, null, 2)
: error.message || 'Unknown error';
throw new Error(`YaCy Error: ${errorMessage}`);
}
};
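All four new clients (Google PSE, Brave, Bing, YaCy) resolve to the same `{ results, originalres }` shape, where each result carries at least `title`, `url`, and `content`, so callers can treat them interchangeably. A hypothetical usage sketch, with an assumed relative import path:

```typescript
// Hypothetical caller; the import path is an assumption about the project layout.
import { searchYaCy } from './lib/searchEngines/yacy';

const printTopResults = async (query: string) => {
  // searchYaCy(query, numResults) returns { results, originalres }.
  const { results } = await searchYaCy(query, 5);

  for (const r of results) {
    // Each backend in this changeset maps its raw response onto title/url/content.
    console.log(`${r.title} -> ${r.url}`);
  }
};

printTopResults('perplexica').catch(console.error);
```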

View File

@@ -10,6 +10,19 @@ import {
   getGeminiApiKey,
   getOpenaiApiKey,
   updateConfig,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiApiKey,
+  getCustomOpenaiModelName,
+  getSearchEngineBackend,
+  getImageSearchEngineBackend,
+  getVideoSearchEngineBackend,
+  getNewsSearchEngineBackend,
+  getSearxngApiEndpoint,
+  getGoogleApiKey,
+  getGoogleCseId,
+  getBingSubscriptionKey,
+  getBraveApiKey,
+  getYacyJsonEndpoint,
 } from '../config';

 import logger from '../utils/logger';
@@ -54,6 +67,24 @@ router.get('/', async (_, res) => {
    config['anthropicApiKey'] = getAnthropicApiKey();
    config['groqApiKey'] = getGroqApiKey();
    config['geminiApiKey'] = getGeminiApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
// Add search engine configuration
config['searchEngineBackends'] = {
search: getSearchEngineBackend(),
image: getImageSearchEngineBackend(),
video: getVideoSearchEngineBackend(),
news: getNewsSearchEngineBackend(),
};
config['searxngEndpoint'] = getSearxngApiEndpoint();
config['googleApiKey'] = getGoogleApiKey();
config['googleCseId'] = getGoogleCseId();
config['bingSubscriptionKey'] = getBingSubscriptionKey();
config['braveApiKey'] = getBraveApiKey();
config['yacyEndpoint'] = getYacyJsonEndpoint();
    res.status(200).json(config);
  } catch (err: any) {
@@ -66,14 +97,51 @@ router.post('/', async (req, res) => {
     const config = req.body;

     const updatedConfig = {
-      API_KEYS: {
-        OPENAI: config.openaiApiKey,
-        GROQ: config.groqApiKey,
-        ANTHROPIC: config.anthropicApiKey,
-        GEMINI: config.geminiApiKey,
+      MODELS: {
+        OPENAI: {
+          API_KEY: config.openaiApiKey,
+        },
+        GROQ: {
+          API_KEY: config.groqApiKey,
+        },
+        ANTHROPIC: {
+          API_KEY: config.anthropicApiKey,
+        },
+        GEMINI: {
+          API_KEY: config.geminiApiKey,
+        },
+        OLLAMA: {
+          API_URL: config.ollamaApiUrl,
+        },
+        CUSTOM_OPENAI: {
+          API_URL: config.customOpenaiApiUrl,
+          API_KEY: config.customOpenaiApiKey,
+          MODEL_NAME: config.customOpenaiModelName,
+        },
       },
-      API_ENDPOINTS: {
-        OLLAMA: config.ollamaApiUrl,
+      SEARCH_ENGINE_BACKENDS: config.searchEngineBackends ? {
+        SEARCH: config.searchEngineBackends.search,
+        IMAGE: config.searchEngineBackends.image,
+        VIDEO: config.searchEngineBackends.video,
+        NEWS: config.searchEngineBackends.news,
+      } : undefined,
+      SEARCH_ENGINES: {
+        GOOGLE: {
+          API_KEY: config.googleApiKey,
+          CSE_ID: config.googleCseId,
+        },
+        SEARXNG: {
+          ENDPOINT: config.searxngEndpoint,
+        },
+        BING: {
+          SUBSCRIPTION_KEY: config.bingSubscriptionKey,
+        },
+        BRAVE: {
+          API_KEY: config.braveApiKey,
+        },
+        YACY: {
+          ENDPOINT: config.yacyEndpoint,
+        },
       },
     };
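With this handler, the settings UI posts a flat JSON body that the route re-nests into the TOML structure before the deep merge. Something like the following sketch would exercise it; the field names match the handler above, the values are placeholders, and the exact mount path and port are assumptions about how the router is wired up:

```bash
# Mount path (/api/config) and port are assumptions; adjust to your deployment.
curl -X POST http://localhost:3001/api/config \
  -H 'Content-Type: application/json' \
  -d '{
        "openaiApiKey": "sk-placeholder",
        "searchEngineBackends": {
          "search": "brave",
          "image": "brave",
          "video": "searxng",
          "news": "searxng"
        },
        "braveApiKey": "brave-placeholder",
        "searxngEndpoint": "http://localhost:32768"
      }'
```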

View File

@@ -1,42 +1,125 @@
 import express from 'express';
-import { searchSearxng } from '../lib/searxng';
+import { searchSearxng } from '../lib/searchEngines/searxng';
+import { searchGooglePSE } from '../lib/searchEngines/google_pse';
+import { searchBraveAPI } from '../lib/searchEngines/brave';
+import { searchYaCy } from '../lib/searchEngines/yacy';
+import { searchBingAPI } from '../lib/searchEngines/bing';
+import { getNewsSearchEngineBackend } from '../config';
 import logger from '../utils/logger';

 const router = express.Router();
async function performSearch(query: string, site: string) {
const searchEngine = getNewsSearchEngineBackend();
switch (searchEngine) {
case 'google': {
const googleResult = await searchGooglePSE(query);
return googleResult.originalres.map((item) => {
const imageSources = [
item.pagemap?.cse_image?.[0]?.src,
item.pagemap?.cse_thumbnail?.[0]?.src,
item.pagemap?.metatags?.[0]?.['og:image'],
item.pagemap?.metatags?.[0]?.['twitter:image'],
item.pagemap?.metatags?.[0]?.['image'],
].filter(Boolean); // Remove undefined values
return {
title: item.title,
url: item.link,
content: item.snippet,
thumbnail: imageSources[0], // First available image
img_src: imageSources[0], // Same as thumbnail for consistency
iframe_src: null,
author: item.pagemap?.metatags?.[0]?.['og:site_name'] || site,
publishedDate:
item.pagemap?.metatags?.[0]?.['article:published_time'],
};
});
}
case 'searxng': {
const searxResult = await searchSearxng(query, {
engines: ['bing news'],
pageno: 1,
});
return searxResult.results;
}
case 'brave': {
const braveResult = await searchBraveAPI(query);
return braveResult.results.map((item) => ({
title: item.title,
url: item.url,
content: item.content,
thumbnail: item.img_src,
img_src: item.img_src,
iframe_src: null,
author: item.meta?.fetched || site,
publishedDate: item.meta?.lastCrawled,
}));
}
case 'yacy': {
const yacyResult = await searchYaCy(query);
return yacyResult.results.map((item) => ({
title: item.title,
url: item.url,
content: item.content,
thumbnail: item.img_src,
img_src: item.img_src,
iframe_src: null,
author: item?.host || site,
publishedDate: item?.pubDate,
}));
}
case 'bing': {
const bingResult = await searchBingAPI(query);
return bingResult.results.map((item) => ({
title: item.title,
url: item.url,
content: item.content,
thumbnail: item.img_src,
img_src: item.img_src,
iframe_src: null,
author: item?.publisher || site,
publishedDate: item?.datePublished,
}));
}
default:
throw new Error(`Unknown search engine ${searchEngine}`);
}
}
 router.get('/', async (req, res) => {
   try {
+    const queries = [
+      { site: 'businessinsider.com', topic: 'AI' },
+      { site: 'www.exchangewire.com', topic: 'AI' },
+      { site: 'yahoo.com', topic: 'AI' },
+      { site: 'businessinsider.com', topic: 'tech' },
+      { site: 'www.exchangewire.com', topic: 'tech' },
+      { site: 'yahoo.com', topic: 'tech' },
+    ];
+
     const data = (
-      await Promise.all([
-        searchSearxng('site:businessinsider.com AI', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-        searchSearxng('site:www.exchangewire.com AI', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-        searchSearxng('site:yahoo.com AI', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-        searchSearxng('site:businessinsider.com tech', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-        searchSearxng('site:www.exchangewire.com tech', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-        searchSearxng('site:yahoo.com tech', {
-          engines: ['bing news'],
-          pageno: 1,
-        }),
-      ])
+      await Promise.all(
+        queries.map(async ({ site, topic }) => {
+          try {
+            const query = `site:${site} ${topic}`;
+            return await performSearch(query, site);
+          } catch (error) {
+            logger.error(`Error searching ${site}: ${error.message}`);
+            return [];
+          }
+        }),
+      )
     )
-      .map((result) => result.results)
       .flat()
-      .sort(() => Math.random() - 0.5);
+      .sort(() => Math.random() - 0.5)
+      .filter((item) => item.title && item.url && item.content);

     return res.json({ blogs: data });
   } catch (err: any) {
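The rewritten handler still returns the merged, shuffled list under a `blogs` key. A quick way to sanity-check it; the mount path and port are assumptions about how the discover router is registered:

```bash
# Mount path (/api/discover) and port are assumptions; adjust to your deployment.
curl -s http://localhost:3001/api/discover | head -c 400
# Expected shape: {"blogs":[{"title":"...","url":"...","content":"...","thumbnail":"...", ...}]}
```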

View File

@@ -5,14 +5,17 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../config';

 const router = express.Router();

 interface ChatModel {
   provider: string;
   model: string;
-  customOpenAIBaseURL?: string;
-  customOpenAIKey?: string;
 }

 interface ImageSearchBody {
@@ -44,21 +47,12 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;

     if (body.chatModel?.provider === 'custom_openai') {
-      if (
-        !body.chatModel?.customOpenAIBaseURL ||
-        !body.chatModel?.customOpenAIKey
-      ) {
-        return res
-          .status(400)
-          .json({ message: 'Missing custom OpenAI base URL or key' });
-      }
-
       llm = new ChatOpenAI({
-        modelName: body.chatModel.model,
-        openAIApiKey: body.chatModel.customOpenAIKey,
+        modelName: getCustomOpenaiModelName(),
+        openAIApiKey: getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
-          baseURL: body.chatModel.customOpenAIBaseURL,
+          baseURL: getCustomOpenaiApiUrl(),
         },
       }) as unknown as BaseChatModel;
     } else if (

View File

@@ -10,14 +10,19 @@ import {
 import { searchHandlers } from '../websocket/messageHandler';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { MetaSearchAgentType } from '../search/metaSearchAgent';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../config';

 const router = express.Router();

 interface chatModel {
   provider: string;
   model: string;
-  customOpenAIBaseURL?: string;
   customOpenAIKey?: string;
+  customOpenAIBaseURL?: string;
 }

 interface embeddingModel {
@@ -78,21 +83,14 @@ router.post('/', async (req, res) => {
     let embeddings: Embeddings | undefined;

     if (body.chatModel?.provider === 'custom_openai') {
-      if (
-        !body.chatModel?.customOpenAIBaseURL ||
-        !body.chatModel?.customOpenAIKey
-      ) {
-        return res
-          .status(400)
-          .json({ message: 'Missing custom OpenAI base URL or key' });
-      }
-
       llm = new ChatOpenAI({
-        modelName: body.chatModel.model,
-        openAIApiKey: body.chatModel.customOpenAIKey,
+        modelName: body.chatModel?.model || getCustomOpenaiModelName(),
+        openAIApiKey:
+          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
-          baseURL: body.chatModel.customOpenAIBaseURL,
+          baseURL:
+            body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
         },
       }) as unknown as BaseChatModel;
     } else if (

View File

@@ -5,14 +5,17 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../config';

 const router = express.Router();

 interface ChatModel {
   provider: string;
   model: string;
-  customOpenAIBaseURL?: string;
-  customOpenAIKey?: string;
 }

 interface SuggestionsBody {
@@ -43,21 +46,12 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;

     if (body.chatModel?.provider === 'custom_openai') {
-      if (
-        !body.chatModel?.customOpenAIBaseURL ||
-        !body.chatModel?.customOpenAIKey
-      ) {
-        return res
-          .status(400)
-          .json({ message: 'Missing custom OpenAI base URL or key' });
-      }
-
       llm = new ChatOpenAI({
-        modelName: body.chatModel.model,
-        openAIApiKey: body.chatModel.customOpenAIKey,
+        modelName: getCustomOpenaiModelName(),
+        openAIApiKey: getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
-          baseURL: body.chatModel.customOpenAIBaseURL,
+          baseURL: getCustomOpenaiApiUrl(),
         },
       }) as unknown as BaseChatModel;
     } else if (

View File

@@ -5,14 +5,17 @@ import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import handleVideoSearch from '../chains/videoSearchAgent';
 import { ChatOpenAI } from '@langchain/openai';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../config';

 const router = express.Router();

 interface ChatModel {
   provider: string;
   model: string;
-  customOpenAIBaseURL?: string;
-  customOpenAIKey?: string;
 }

 interface VideoSearchBody {
@@ -44,21 +47,12 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;

     if (body.chatModel?.provider === 'custom_openai') {
-      if (
-        !body.chatModel?.customOpenAIBaseURL ||
-        !body.chatModel?.customOpenAIKey
-      ) {
-        return res
-          .status(400)
-          .json({ message: 'Missing custom OpenAI base URL or key' });
-      }
-
       llm = new ChatOpenAI({
-        modelName: body.chatModel.model,
-        openAIApiKey: body.chatModel.customOpenAIKey,
+        modelName: getCustomOpenaiModelName(),
+        openAIApiKey: getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
-          baseURL: body.chatModel.customOpenAIBaseURL,
+          baseURL: getCustomOpenaiApiUrl(),
         },
       }) as unknown as BaseChatModel;
     } else if (

View File

@@ -17,7 +17,12 @@ import LineListOutputParser from '../lib/outputParsers/listLineOutputParser';
 import LineOutputParser from '../lib/outputParsers/lineOutputParser';
 import { getDocumentsFromLinks } from '../utils/documents';
 import { Document } from 'langchain/document';
-import { searchSearxng } from '../lib/searxng';
+import { searchSearxng } from '../lib/searchEngines/searxng';
+import { searchGooglePSE } from '../lib/searchEngines/google_pse';
+import { searchBingAPI } from '../lib/searchEngines/bing';
+import { searchBraveAPI } from '../lib/searchEngines/brave';
+import { searchYaCy } from '../lib/searchEngines/yacy';
+import { getSearchEngineBackend } from '../config';
 import path from 'path';
 import fs from 'fs';
 import computeSimilarity from '../utils/computeSimilarity';
@@ -203,10 +208,37 @@ class MetaSearchAgent implements MetaSearchAgentType {
       return { query: question, docs: docs };
     } else {
-      const res = await searchSearxng(question, {
-        language: 'en',
-        engines: this.config.activeEngines,
-      });
+      const searchEngine = getSearchEngineBackend();
+
+      let res;
+      switch (searchEngine) {
case 'searxng':
res = await searchSearxng(question, {
language: 'en',
engines: this.config.activeEngines,
});
break;
case 'google':
res = await searchGooglePSE(question);
break;
case 'bing':
res = await searchBingAPI(question);
break;
case 'brave':
res = await searchBraveAPI(question);
break;
case 'yacy':
res = await searchYaCy(question);
break;
default:
throw new Error(`Unknown search engine ${searchEngine}`);
}
if (!res?.results) {
throw new Error(
`No results found for search engine: ${searchEngine}`,
);
}
      const documents = res.results.map(
        (result) =>

View File

@@ -9,6 +9,11 @@ import type { Embeddings } from '@langchain/core/embeddings';
 import type { IncomingMessage } from 'http';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
+import {
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+} from '../config';

 export const handleConnection = async (
   ws: WebSocket,
@@ -48,14 +53,20 @@ export const handleConnection = async (
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
     } else if (chatModelProvider == 'custom_openai') {
-      llm = new ChatOpenAI({
-        modelName: chatModel,
-        openAIApiKey: searchParams.get('openAIApiKey'),
-        temperature: 0.7,
-        configuration: {
-          baseURL: searchParams.get('openAIBaseURL'),
-        },
-      }) as unknown as BaseChatModel;
+      const customOpenaiApiKey = getCustomOpenaiApiKey();
+      const customOpenaiApiUrl = getCustomOpenaiApiUrl();
+      const customOpenaiModelName = getCustomOpenaiModelName();
+
+      if (customOpenaiApiKey && customOpenaiApiUrl && customOpenaiModelName) {
+        llm = new ChatOpenAI({
+          modelName: customOpenaiModelName,
+          openAIApiKey: customOpenaiApiKey,
+          temperature: 0.7,
+          configuration: {
+            baseURL: customOpenaiApiUrl,
+          },
+        }) as unknown as BaseChatModel;
+      }
     }

     if (

ui/app/settings/page.tsx (new file, 1061 lines)

File diff suppressed because it is too large.

View File

@@ -10,7 +10,7 @@ import { toast } from 'sonner';
 import { useSearchParams } from 'next/navigation';
 import { getSuggestions } from '@/lib/actions';
 import { Settings } from 'lucide-react';
-import SettingsDialog from './SettingsDialog';
+import Link from 'next/link';
 import NextError from 'next/error';

 export type Message = {
@@ -40,6 +40,7 @@ const useSocket = (
   const isCleaningUpRef = useRef(false);
   const MAX_RETRIES = 3;
   const INITIAL_BACKOFF = 1000; // 1 second
+  const isConnectionErrorRef = useRef(false);

   const getBackoffDelay = (retryCount: number) => {
     return Math.min(INITIAL_BACKOFF * Math.pow(2, retryCount), 10000); // Cap at 10 seconds
@@ -58,14 +59,17 @@ const useSocket = (
         let embeddingModelProvider = localStorage.getItem(
           'embeddingModelProvider',
         );
-        let openAIBaseURL =
-          chatModelProvider === 'custom_openai'
-            ? localStorage.getItem('openAIBaseURL')
-            : null;
-        let openAIPIKey =
-          chatModelProvider === 'custom_openai'
-            ? localStorage.getItem('openAIApiKey')
-            : null;
+
+        const autoImageSearch = localStorage.getItem('autoImageSearch');
+        const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+        if (!autoImageSearch) {
+          localStorage.setItem('autoImageSearch', 'true');
+        }
+
+        if (!autoVideoSearch) {
+          localStorage.setItem('autoVideoSearch', 'false');
+        }

         const providers = await fetch(
           `${process.env.NEXT_PUBLIC_API_URL}/models`,
@@ -94,21 +98,13 @@ const useSocket = (
           chatModelProvider =
             chatModelProvider || Object.keys(chatModelProviders)[0];

-          if (chatModelProvider === 'custom_openai') {
-            toast.error(
-              'Seems like you are using the custom OpenAI provider, please open the settings and enter a model name to use.',
-            );
-            setError(true);
-            return;
-          } else {
-            chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-
-            if (
-              !chatModelProviders ||
-              Object.keys(chatModelProviders).length === 0
-            )
-              return toast.error('No chat models available');
-          }
+          chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+
+          if (
+            !chatModelProviders ||
+            Object.keys(chatModelProviders).length === 0
+          )
+            return toast.error('No chat models available');
         }

         if (!embeddingModel || !embeddingModelProvider) {
@@ -139,9 +135,7 @@ const useSocket = (
         if (
           Object.keys(chatModelProviders).length > 0 &&
-          (((!openAIBaseURL || !openAIPIKey) &&
-            chatModelProvider === 'custom_openai') ||
-            !chatModelProviders[chatModelProvider])
+          !chatModelProviders[chatModelProvider]
         ) {
           const chatModelProvidersKeys = Object.keys(chatModelProviders);
           chatModelProvider =
@@ -149,23 +143,11 @@ const useSocket = (
             (key) => Object.keys(chatModelProviders[key]).length > 0,
           ) || chatModelProvidersKeys[0];

-          if (
-            chatModelProvider === 'custom_openai' &&
-            (!openAIBaseURL || !openAIPIKey)
-          ) {
-            toast.error(
-              'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
-            );
-            setError(true);
-            return;
-          }
-
           localStorage.setItem('chatModelProvider', chatModelProvider);
         }

         if (
           chatModelProvider &&
-          (!openAIBaseURL || !openAIPIKey) &&
           !chatModelProviders[chatModelProvider][chatModel]
         ) {
           chatModel = Object.keys(
@ -251,6 +233,8 @@ const useSocket = (
console.debug(new Date(), 'ws:connected'); console.debug(new Date(), 'ws:connected');
} }
if (data.type === 'error') { if (data.type === 'error') {
isConnectionErrorRef.current = true;
setError(true);
toast.error(data.data); toast.error(data.data);
} }
}); });
@@ -265,7 +249,7 @@ const useSocket = (
       clearTimeout(timeoutId);
       setIsWSReady(false);
       console.debug(new Date(), 'ws:disconnected');
-      if (!isCleaningUpRef.current) {
+      if (!isCleaningUpRef.current && !isConnectionErrorRef.current) {
         toast.error('Connection lost. Attempting to reconnect...');
         attemptReconnect();
       }
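
Note: the two hunks above introduce a connection-error flag so that a server-reported error (data.type === 'error') does not also surface the generic "Connection lost" toast when the socket subsequently closes. A minimal sketch of that guard pattern, with the hook and handler names invented for illustration (only isConnectionErrorRef and isCleaningUpRef come from the diff):

    import { useEffect, useRef } from 'react';

    // Illustrative hook, not the project's actual useSocket implementation.
    export const useGuardedSocket = (
      url: string,
      onServerError: (msg: string) => void,
    ) => {
      const isConnectionErrorRef = useRef(false); // set when the server reports an error
      const isCleaningUpRef = useRef(false); // set when the component unmounts

      useEffect(() => {
        const ws = new WebSocket(url);

        ws.addEventListener('message', (event) => {
          const data = JSON.parse(event.data);
          if (data.type === 'error') {
            // Remember that this failure was already shown to the user...
            isConnectionErrorRef.current = true;
            onServerError(data.data);
          }
        });

        ws.addEventListener('close', () => {
          // ...so the generic "Connection lost" path is skipped on close.
          if (!isCleaningUpRef.current && !isConnectionErrorRef.current) {
            console.warn('Connection lost. Attempting to reconnect...');
          }
        });

        return () => {
          isCleaningUpRef.current = true;
          ws.close();
        };
      }, [url, onServerError]);
    };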
@@ -595,6 +579,17 @@ const ChatWindow = ({ id }: { id?: string }) => {
         }),
       );
     }
+
+    const autoImageSearch = localStorage.getItem('autoImageSearch');
+    const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+
+    if (autoImageSearch === 'true') {
+      document.getElementById('search-images')?.click();
+    }
+
+    if (autoVideoSearch === 'true') {
+      document.getElementById('search-videos')?.click();
+    }
   }
 };
@@ -629,17 +624,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
   return (
     <div className="relative">
       <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
-        <Settings
-          className="cursor-pointer lg:hidden"
-          onClick={() => setIsSettingsOpen(true)}
-        />
+        <Link href="/settings">
+          <Settings className="cursor-pointer lg:hidden" />
+        </Link>
       </div>
       <div className="flex flex-col items-center justify-center min-h-screen">
         <p className="dark:text-white/70 text-black/70 text-sm">
           Failed to connect to the server. Please try again later.
         </p>
       </div>
-      <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
     </div>
   );
 }
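
Note: taken together, the ChatWindow changes above seed autoImageSearch / autoVideoSearch defaults in localStorage on load and, once a response has finished, click the image and video search buttons (which gain id="search-images" and id="search-videos" further down). A standalone sketch of that flow; the helper names are invented, and the real logic lives inline in ChatWindow:

    // Seed defaults on first load: image auto-search on, video auto-search off.
    const seedAutoSearchDefaults = (): void => {
      if (!localStorage.getItem('autoImageSearch')) {
        localStorage.setItem('autoImageSearch', 'true');
      }
      if (!localStorage.getItem('autoVideoSearch')) {
        localStorage.setItem('autoVideoSearch', 'false');
      }
    };

    // After a message completes, trigger whichever lookups the user enabled by
    // programmatically clicking the corresponding buttons.
    const triggerAutoSearches = (): void => {
      if (localStorage.getItem('autoImageSearch') === 'true') {
        document.getElementById('search-images')?.click();
      }
      if (localStorage.getItem('autoVideoSearch') === 'true') {
        document.getElementById('search-videos')?.click();
      }
    };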


@@ -1,8 +1,8 @@
 import { Settings } from 'lucide-react';
 import EmptyChatMessageInput from './EmptyChatMessageInput';
-import SettingsDialog from './SettingsDialog';
 import { useState } from 'react';
 import { File } from './ChatWindow';
+import Link from 'next/link';

 const EmptyChat = ({
   sendMessage,
@@ -29,12 +29,10 @@ const EmptyChat = ({
   return (
     <div className="relative">
-      <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
       <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
-        <Settings
-          className="cursor-pointer lg:hidden"
-          onClick={() => setIsSettingsOpen(true)}
-        />
+        <Link href="/settings">
+          <Settings className="cursor-pointer lg:hidden" />
+        </Link>
       </div>
       <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
         <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">


@@ -68,7 +68,13 @@ const MessageBox = ({
   return (
     <div>
       {message.role === 'user' && (
-        <div className={cn('w-full', messageIndex === 0 ? 'pt-16' : 'pt-8')}>
+        <div
+          className={cn(
+            'w-full',
+            messageIndex === 0 ? 'pt-16' : 'pt-8',
+            'break-words',
+          )}
+        >
           <h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
             {message.content}
           </h2>


@@ -110,7 +110,7 @@ const Attach = ({
         <button
           type="button"
           onClick={() => fileInputRef.current.click()}
-          className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
+          className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
         >
           <input
             type="file"
@@ -128,7 +128,7 @@ const Attach = ({
             setFiles([]);
             setFileIds([]);
           }}
-          className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
+          className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
         >
           <Trash size={14} />
           <p className="text-xs">Clear</p>
@@ -145,7 +145,7 @@ const Attach = ({
           <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
             <File size={16} className="text-white/70" />
           </div>
-          <p className="text-white/70 text-sm">
+          <p className="text-black/70 dark:text-white/70 text-sm">
             {file.fileName.length > 25
               ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                 '...' +


@@ -82,7 +82,7 @@ const AttachSmall = ({
         <button
           type="button"
           onClick={() => fileInputRef.current.click()}
-          className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
+          className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
         >
           <input
             type="file"
@@ -100,7 +100,7 @@ const AttachSmall = ({
             setFiles([]);
             setFileIds([]);
           }}
-          className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
+          className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
         >
           <Trash size={14} />
           <p className="text-xs">Clear</p>
@@ -117,7 +117,7 @@ const AttachSmall = ({
           <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
             <File size={16} className="text-white/70" />
           </div>
-          <p className="text-white/70 text-sm">
+          <p className="text-black/70 dark:text-white/70 text-sm">
             {file.fileName.length > 25
               ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                 '...' +


@@ -27,6 +27,7 @@ const SearchImages = ({
     <>
       {!loading && images === null && (
         <button
+          id="search-images"
           onClick={async () => {
             setLoading(true);


@@ -42,6 +42,7 @@ const Searchvideos = ({
     <>
       {!loading && videos === null && (
         <button
+          id="search-videos"
           onClick={async () => {
             setLoading(true);


@@ -1,528 +0,0 @@
import { cn } from '@/lib/utils';
import {
Dialog,
DialogPanel,
DialogTitle,
Transition,
TransitionChild,
} from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, {
Fragment,
useEffect,
useState,
type SelectHTMLAttributes,
} from 'react';
import ThemeSwitcher from './theme/Switcher';
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
const Input = ({ className, ...restProps }: InputProps) => {
return (
<input
{...restProps}
className={cn(
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
className,
)}
/>
);
};
interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
options: { value: string; label: string; disabled?: boolean }[];
}
export const Select = ({ className, options, ...restProps }: SelectProps) => {
return (
<select
{...restProps}
className={cn(
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
className,
)}
>
{options.map(({ label, value, disabled }) => {
return (
<option key={value} value={value} disabled={disabled}>
{label}
</option>
);
})}
</select>
);
};
interface SettingsType {
chatModelProviders: {
[key: string]: [Record<string, any>];
};
embeddingModelProviders: {
[key: string]: [Record<string, any>];
};
openaiApiKey: string;
groqApiKey: string;
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
}
const SettingsDialog = ({
isOpen,
setIsOpen,
}: {
isOpen: boolean;
setIsOpen: (isOpen: boolean) => void;
}) => {
const [config, setConfig] = useState<SettingsType | null>(null);
const [chatModels, setChatModels] = useState<Record<string, any>>({});
const [embeddingModels, setEmbeddingModels] = useState<Record<string, any>>(
{},
);
const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
string | null
>(null);
const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
null,
);
const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
useState<string | null>(null);
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
string | null
>(null);
const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
const [isLoading, setIsLoading] = useState(false);
const [isUpdating, setIsUpdating] = useState(false);
useEffect(() => {
if (isOpen) {
const fetchConfig = async () => {
setIsLoading(true);
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
headers: {
'Content-Type': 'application/json',
},
});
const data = (await res.json()) as SettingsType;
setConfig(data);
const chatModelProvidersKeys = Object.keys(
data.chatModelProviders || {},
);
const embeddingModelProvidersKeys = Object.keys(
data.embeddingModelProviders || {},
);
const defaultChatModelProvider =
chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
const defaultEmbeddingModelProvider =
embeddingModelProvidersKeys.length > 0
? embeddingModelProvidersKeys[0]
: '';
const chatModelProvider =
localStorage.getItem('chatModelProvider') ||
defaultChatModelProvider ||
'';
const chatModel =
localStorage.getItem('chatModel') ||
(data.chatModelProviders &&
data.chatModelProviders[chatModelProvider]?.length > 0
? data.chatModelProviders[chatModelProvider][0].name
: undefined) ||
'';
const embeddingModelProvider =
localStorage.getItem('embeddingModelProvider') ||
defaultEmbeddingModelProvider ||
'';
const embeddingModel =
localStorage.getItem('embeddingModel') ||
(data.embeddingModelProviders &&
data.embeddingModelProviders[embeddingModelProvider]?.[0].name) ||
'';
setSelectedChatModelProvider(chatModelProvider);
setSelectedChatModel(chatModel);
setSelectedEmbeddingModelProvider(embeddingModelProvider);
setSelectedEmbeddingModel(embeddingModel);
setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
setChatModels(data.chatModelProviders || {});
setEmbeddingModels(data.embeddingModelProviders || {});
setIsLoading(false);
};
fetchConfig();
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isOpen]);
const handleSubmit = async () => {
setIsUpdating(true);
try {
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(config),
});
localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
localStorage.setItem('chatModel', selectedChatModel!);
localStorage.setItem(
'embeddingModelProvider',
selectedEmbeddingModelProvider!,
);
localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
} catch (err) {
console.log(err);
} finally {
setIsUpdating(false);
setIsOpen(false);
window.location.reload();
}
};
return (
<Transition appear show={isOpen} as={Fragment}>
<Dialog
as="div"
className="relative z-50"
onClose={() => setIsOpen(false)}
>
<TransitionChild
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0"
enterTo="opacity-100"
leave="ease-in duration-200"
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
</TransitionChild>
<div className="fixed inset-0 overflow-y-auto">
<div className="flex min-h-full items-center justify-center p-4 text-center">
<TransitionChild
as={Fragment}
enter="ease-out duration-200"
enterFrom="opacity-0 scale-95"
enterTo="opacity-100 scale-100"
leave="ease-in duration-100"
leaveFrom="opacity-100 scale-200"
leaveTo="opacity-0 scale-95"
>
<DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<DialogTitle className="text-xl font-medium leading-6 dark:text-white">
Settings
</DialogTitle>
{config && !isLoading && (
<div className="flex flex-col space-y-4 mt-6">
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Theme
</p>
<ThemeSwitcher />
</div>
{config.chatModelProviders && (
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Chat model Provider
</p>
<Select
value={selectedChatModelProvider ?? undefined}
onChange={(e) => {
setSelectedChatModelProvider(e.target.value);
if (e.target.value === 'custom_openai') {
setSelectedChatModel('');
} else {
setSelectedChatModel(
config.chatModelProviders[e.target.value][0]
.name,
);
}
}}
options={Object.keys(config.chatModelProviders).map(
(provider) => ({
value: provider,
label:
provider.charAt(0).toUpperCase() +
provider.slice(1),
}),
)}
/>
</div>
)}
{selectedChatModelProvider &&
selectedChatModelProvider != 'custom_openai' && (
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Chat Model
</p>
<Select
value={selectedChatModel ?? undefined}
onChange={(e) =>
setSelectedChatModel(e.target.value)
}
options={(() => {
const chatModelProvider =
config.chatModelProviders[
selectedChatModelProvider
];
return chatModelProvider
? chatModelProvider.length > 0
? chatModelProvider.map((model) => ({
value: model.name,
label: model.displayName,
}))
: [
{
value: '',
label: 'No models available',
disabled: true,
},
]
: [
{
value: '',
label:
'Invalid provider, please check backend logs',
disabled: true,
},
];
})()}
/>
</div>
)}
{selectedChatModelProvider &&
selectedChatModelProvider === 'custom_openai' && (
<>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Model name
</p>
<Input
type="text"
placeholder="Model name"
defaultValue={selectedChatModel!}
onChange={(e) =>
setSelectedChatModel(e.target.value)
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Custom OpenAI API Key
</p>
<Input
type="text"
placeholder="Custom OpenAI API Key"
defaultValue={customOpenAIApiKey!}
onChange={(e) =>
setCustomOpenAIApiKey(e.target.value)
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Custom OpenAI Base URL
</p>
<Input
type="text"
placeholder="Custom OpenAI Base URL"
defaultValue={customOpenAIBaseURL!}
onChange={(e) =>
setCustomOpenAIBaseURL(e.target.value)
}
/>
</div>
</>
)}
{/* Embedding models */}
{config.embeddingModelProviders && (
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Embedding model Provider
</p>
<Select
value={selectedEmbeddingModelProvider ?? undefined}
onChange={(e) => {
setSelectedEmbeddingModelProvider(e.target.value);
setSelectedEmbeddingModel(
config.embeddingModelProviders[e.target.value][0]
.name,
);
}}
options={Object.keys(
config.embeddingModelProviders,
).map((provider) => ({
label:
provider.charAt(0).toUpperCase() +
provider.slice(1),
value: provider,
}))}
/>
</div>
)}
{selectedEmbeddingModelProvider && (
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Embedding Model
</p>
<Select
value={selectedEmbeddingModel ?? undefined}
onChange={(e) =>
setSelectedEmbeddingModel(e.target.value)
}
options={(() => {
const embeddingModelProvider =
config.embeddingModelProviders[
selectedEmbeddingModelProvider
];
return embeddingModelProvider
? embeddingModelProvider.length > 0
? embeddingModelProvider.map((model) => ({
label: model.displayName,
value: model.name,
}))
: [
{
label: 'No embedding models available',
value: '',
disabled: true,
},
]
: [
{
label:
'Invalid provider, please check backend logs',
value: '',
disabled: true,
},
];
})()}
/>
</div>
)}
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
OpenAI API Key
</p>
<Input
type="text"
placeholder="OpenAI API Key"
defaultValue={config.openaiApiKey}
onChange={(e) =>
setConfig({
...config,
openaiApiKey: e.target.value,
})
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Ollama API URL
</p>
<Input
type="text"
placeholder="Ollama API URL"
defaultValue={config.ollamaApiUrl}
onChange={(e) =>
setConfig({
...config,
ollamaApiUrl: e.target.value,
})
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
GROQ API Key
</p>
<Input
type="text"
placeholder="GROQ API Key"
defaultValue={config.groqApiKey}
onChange={(e) =>
setConfig({
...config,
groqApiKey: e.target.value,
})
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Anthropic API Key
</p>
<Input
type="text"
placeholder="Anthropic API key"
defaultValue={config.anthropicApiKey}
onChange={(e) =>
setConfig({
...config,
anthropicApiKey: e.target.value,
})
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Gemini API Key
</p>
<Input
type="text"
placeholder="Gemini API key"
defaultValue={config.geminiApiKey}
onChange={(e) =>
setConfig({
...config,
geminiApiKey: e.target.value,
})
}
/>
</div>
</div>
)}
{isLoading && (
<div className="w-full flex items-center justify-center mt-6 text-black/70 dark:text-white/70 py-6">
<RefreshCcw className="animate-spin" />
</div>
)}
<div className="w-full mt-6 space-y-2">
<p className="text-xs text-black/50 dark:text-white/50">
We&apos;ll refresh the page after updating the settings.
</p>
<button
onClick={handleSubmit}
className="bg-[#24A0ED] flex flex-row items-center space-x-2 text-white disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#ececec21] rounded-full px-4 py-2"
disabled={isLoading || isUpdating}
>
{isUpdating ? (
<RefreshCw size={20} className="animate-spin" />
) : (
<CloudUpload size={20} />
)}
</button>
</div>
</DialogPanel>
</TransitionChild>
</div>
</div>
</Dialog>
</Transition>
);
};
export default SettingsDialog;


@@ -6,7 +6,6 @@ import Link from 'next/link';
 import { useSelectedLayoutSegments } from 'next/navigation';
 import React, { useState, type ReactNode } from 'react';
 import Layout from './Layout';
-import SettingsDialog from './SettingsDialog';

 const VerticalIconContainer = ({ children }: { children: ReactNode }) => {
   return (
@@ -67,15 +66,9 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
           ))}
         </VerticalIconContainer>

-        <Settings
-          onClick={() => setIsSettingsOpen(!isSettingsOpen)}
-          className="cursor-pointer"
-        />
-
-        <SettingsDialog
-          isOpen={isSettingsOpen}
-          setIsOpen={setIsSettingsOpen}
-        />
+        <Link href="/settings">
+          <Settings className="cursor-pointer" />
+        </Link>
       </div>
     </div>


@@ -1,8 +1,7 @@
 'use client';

 import { useTheme } from 'next-themes';
-import { SunIcon, MoonIcon, MonitorIcon } from 'lucide-react';
 import { useCallback, useEffect, useState } from 'react';
-import { Select } from '../SettingsDialog';
+import Select from '../ui/Select';

 type Theme = 'dark' | 'light' | 'system';


@@ -0,0 +1,28 @@
import { cn } from '@/lib/utils';
import { SelectHTMLAttributes } from 'react';
interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
options: { value: string; label: string; disabled?: boolean }[];
}
export const Select = ({ className, options, ...restProps }: SelectProps) => {
return (
<select
{...restProps}
className={cn(
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
className,
)}
>
{options.map(({ label, value, disabled }) => {
return (
<option key={value} value={value} disabled={disabled}>
{label}
</option>
);
})}
</select>
);
};
export default Select;
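
Note: this new ui/Select module extracts the Select previously exported from SettingsDialog, and the theme Switcher above now imports it as a default export. A hypothetical consumer, for illustration only (the component and its props are not part of the diff):

    import Select from './Select';

    // Illustrative wrapper; not part of the actual changes.
    const ThemePicker = ({
      value,
      onChange,
    }: {
      value: string;
      onChange: (theme: string) => void;
    }) => (
      <Select
        value={value}
        onChange={(e) => onChange(e.target.value)}
        options={[
          { value: 'light', label: 'Light' },
          { value: 'dark', label: 'Dark' },
          { value: 'system', label: 'System' },
        ]}
      />
    );

    export default ThemePicker;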


@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.10.0-rc2",
+  "version": "1.10.0-rc3",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {