Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-26 19:58:41 +00:00)

Compare commits: cd9dd85263...develop/v1 (51 commits)
Commits (SHA1):
59ab10110a, 10f9cd2f79, 82e1dd73b0, 728b499281, 5a4dafc753, 4ac99786f0, 1224281278, 3daae29a5d, 50bcaa13f2, 31e4abf068, fd6e701cf0, 89880a2555, 07776d8699, a24992a3db, 32fb6ac131, 99137d95e7, 490a8db538, aba702c51b, 89a6e7fbb1, f19d2e3a97, 4a7ca8fc68, 3d642f2539, aa91d3bc60, 93c5ed46f6, af4b97b766, ca86a7e358, 99351fc2a6, 7a816efc04, d584067bb1, 4d41243108, 6c218b5fee, 1c1f31e23a, 5b15bcfe17, df4350f966, 652ca2fdf4, 216576128d, bb3f180583, 4d24d73161, 2e166c217b, 4c73caadf6, 5f0b87f4a9, 115e6b2a71, a5c79c92ed, db3cea446e, 8e683d266a, e9ab425cee, 811c0c6fe1, cab1aa705c, 5cbc512322, 41d056e755, 07dc7d7649
.gitignore (vendored, 4 lines changed)

@@ -2,6 +2,7 @@
node_modules/
npm-debug.log
yarn-error.log
package-lock.json

# Build output
/.next/

@@ -37,3 +38,6 @@ Thumbs.db
# Db
db.sqlite
/searxng

# Dev
docker-compose-dev.yaml
@@ -2,7 +2,6 @@

[](https://discord.gg/26aArMy8tT)


## Table of Contents <!-- omit in toc -->

@@ -44,7 +43,7 @@ Want to know more about its architecture and how it works? You can read it [here

- **Normal Mode:** Processes your query and performs a web search.
- **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
  - **All Mode:** Searches the entire web to find the best results.
  - **Writing Assistant Mode:** Helpful for writing tasks that does not require searching the web.
  - **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
  - **Academic Search Mode:** Finds articles and papers, ideal for academic research.
  - **YouTube Search Mode:** Finds YouTube videos based on the search query.
  - **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.

@@ -143,6 +142,7 @@ You can access Perplexica over your home network by following our networking gui

## One-Click Deployment

[](https://usw.sealos.io/?openapp=system-template%3FtemplateName%3Dperplexica)
[](https://repocloud.io/details/?app_id=267)

## Upcoming Features
@@ -4,7 +4,7 @@ services:
    volumes:
      - ./searxng:/etc/searxng:rw
    ports:
      - 4000:8080
      - '4000:8080'
    networks:
      - perplexica-network
    restart: unless-stopped

@@ -19,7 +19,7 @@ services:
    depends_on:
      - searxng
    ports:
      - 3001:3001
      - '3001:3001'
    volumes:
      - backend-dbstore:/home/perplexica/data
      - uploads:/home/perplexica/uploads

@@ -41,7 +41,7 @@ services:
    depends_on:
      - perplexica-backend
    ports:
      - 3000:3000
      - '3000:3000'
    networks:
      - perplexica-network
    restart: unless-stopped
@@ -7,34 +7,43 @@ To update Perplexica to the latest version, follow these steps:

1. Clone the latest version of Perplexica from GitHub:

   ```bash
   git clone https://github.com/ItzCrazyKns/Perplexica.git
   ```

2. Navigate to the Project Directory.
2. Navigate to the project directory.

3. Pull latest images from registry.
3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.

4. Pull the latest images from the registry.

   ```bash
   docker compose pull
   ```

4. Update and Recreate containers.
5. Update and recreate the containers.

   ```bash
   docker compose up -d
   ```

5. Once the command completes running go to http://localhost:3000 and verify the latest changes.
6. Once the command completes, go to http://localhost:3000 and verify the latest changes.
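Condensed into a single shell session, the Docker update path above looks roughly like this; the `Perplexica` directory name and the rename step are inferred from the instructions rather than taken verbatim from the repository:

```bash
cd Perplexica                        # step 2: enter the project directory (assumed clone location)

# step 3: only needed when sample.config.toml gained new fields
rm config.toml                       # drop the outdated config
mv sample.config.toml config.toml    # start from the new sample, then re-add your keys

docker compose pull                  # step 4: pull the latest images
docker compose up -d                 # step 5: recreate the containers
# step 6: open http://localhost:3000 and verify the changes
```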
## For non Docker users
## For non-Docker users

1. Clone the latest version of Perplexica from GitHub:

   ```bash
   git clone https://github.com/ItzCrazyKns/Perplexica.git
   ```

2. Navigate to the Project Directory
3. Execute `npm i` in both the `ui` folder and the root directory.
4. Once packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
5. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.

2. Navigate to the project directory.

3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.

4. Execute `npm i` in both the `ui` folder and the root directory.

5. Once the packages are updated, execute `npm run build` in both the `ui` folder and the root directory.

6. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
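As with the Docker path, the non-Docker steps can be sketched as shell commands; the `Perplexica` directory name is assumed, and the two `npm run start` processes would normally run in separate terminals:

```bash
cd Perplexica                               # step 2 (assumed clone location)
# step 3: refresh config.toml from sample.config.toml if it gained new fields

npm i && (cd ui && npm i)                   # step 4: install dependencies in the root and in ui/
npm run build && (cd ui && npm run build)   # step 5: build backend and frontend

npm run start                               # step 6: start the backend...
(cd ui && npm run start)                    # ...and the frontend, each in its own terminal
```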
---
@@ -1,6 +1,6 @@
{
  "name": "perplexica-backend",
  "version": "1.10.0-rc2",
  "version": "1.10.0-rc3",
  "license": "MIT",
  "author": "ItzCrazyKns",
  "scripts": {

@@ -30,8 +30,8 @@
    "@iarna/toml": "^2.2.5",
    "@langchain/anthropic": "^0.2.3",
    "@langchain/community": "^0.2.16",
    "@langchain/openai": "^0.0.25",
    "@langchain/google-genai": "^0.0.23",
    "@langchain/openai": "^0.0.25",
    "@xenova/transformers": "^2.17.1",
    "axios": "^1.6.8",
    "better-sqlite3": "^11.0.0",
@@ -3,12 +3,43 @@ PORT = 3001 # Port to run the server on
SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

[API_KEYS]
OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef
[SEARCH_ENGINE_BACKENDS] # "google" | "searxng" | "bing" | "brave" | "yacy"
SEARCH = "searxng"
IMAGE = "searxng"
VIDEO = "searxng"
NEWS = "searxng"

[API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
[MODELS.OPENAI]
API_KEY = ""

[MODELS.GROQ]
API_KEY = ""

[MODELS.ANTHROPIC]
API_KEY = ""

[MODELS.GEMINI]
API_KEY = ""

[MODELS.CUSTOM_OPENAI]
API_KEY = ""
API_URL = ""

[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434

[SEARCH_ENGINES.GOOGLE]
API_KEY = ""
CSE_ID = ""

[SEARCH_ENGINES.SEARXNG]
ENDPOINT = ""

[SEARCH_ENGINES.BING]
SUBSCRIPTION_KEY = ""

[SEARCH_ENGINES.BRAVE]
API_KEY = ""

[SEARCH_ENGINES.YACY]
ENDPOINT = ""
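To make the new layout concrete, a setup that keeps SearXNG for general search but sends image search through Google PSE would only need the following entries; the values shown are placeholders, not defaults shipped with the project:

```toml
[SEARCH_ENGINE_BACKENDS] # "google" | "searxng" | "bing" | "brave" | "yacy"
SEARCH = "searxng"
IMAGE = "google"   # image queries go through Google PSE instead of the default

[SEARCH_ENGINES.SEARXNG]
ENDPOINT = "http://localhost:32768"

[SEARCH_ENGINES.GOOGLE]
API_KEY = "your-google-api-key"   # placeholder
CSE_ID = "your-cse-id"            # placeholder
```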
|
@ -15,3 +15,5 @@ server:
|
||||
engines:
|
||||
- name: wolframalpha
|
||||
disabled: false
|
||||
- name: qwant
|
||||
disabled: true
|
||||
|
@ -7,7 +7,12 @@ import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
||||
import { searchYaCy } from '../lib/searchEngines/yacy';
|
||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
||||
import { getImageSearchEngineBackend } from '../config';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
|
||||
const imageSearchChainPrompt = `
|
||||
@ -36,6 +41,103 @@ type ImageSearchChainInput = {
|
||||
query: string;
|
||||
};
|
||||
|
||||
async function performImageSearch(query: string) {
|
||||
const searchEngine = getImageSearchEngineBackend();
|
||||
let images = [];
|
||||
|
||||
switch (searchEngine) {
|
||||
case 'google': {
|
||||
const googleResult = await searchGooglePSE(query);
|
||||
images = googleResult.results
|
||||
.map((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
return {
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
source: result.displayLink,
|
||||
};
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'searxng': {
|
||||
const searxResult = await searchSearxng(query, {
|
||||
engines: ['google images', 'bing images'],
|
||||
pageno: 1,
|
||||
});
|
||||
searxResult.results.forEach((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
images.push({
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
});
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'brave': {
|
||||
const braveResult = await searchBraveAPI(query);
|
||||
images = braveResult.results
|
||||
.map((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
return {
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
source: result.url,
|
||||
};
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'yacy': {
|
||||
const yacyResult = await searchYaCy(query);
|
||||
images = yacyResult.results
|
||||
.map((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
return {
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
source: result.url,
|
||||
};
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'bing': {
|
||||
const bingResult = await searchBingAPI(query);
|
||||
images = bingResult.results
|
||||
.map((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
return {
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
source: result.url,
|
||||
};
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
||||
}
|
||||
|
||||
return images;
|
||||
}
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
|
||||
const createImageSearchChain = (llm: BaseChatModel) => {
|
||||
@ -52,22 +154,7 @@ const createImageSearchChain = (llm: BaseChatModel) => {
|
||||
llm,
|
||||
strParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
const res = await searchSearxng(input, {
|
||||
engines: ['bing images', 'google images'],
|
||||
});
|
||||
|
||||
const images = [];
|
||||
|
||||
res.results.forEach((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
images.push({
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const images = await performImageSearch(input);
|
||||
return images.slice(0, 10);
|
||||
}),
|
||||
]);
|
||||
|
@ -7,26 +7,30 @@ import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
||||
import { getVideoSearchEngineBackend } from '../config';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
|
||||
const VideoSearchChainPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
|
||||
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
|
||||
|
||||
|
||||
Example:
|
||||
1. Follow up question: How does a car work?
|
||||
Rephrased: How does a car work?
|
||||
|
||||
|
||||
2. Follow up question: What is the theory of relativity?
|
||||
Rephrased: What is theory of relativity
|
||||
|
||||
|
||||
3. Follow up question: How does an AC work?
|
||||
Rephrased: How does an AC work
|
||||
|
||||
|
||||
Conversation:
|
||||
{chat_history}
|
||||
|
||||
|
||||
Follow up question: {query}
|
||||
Rephrased question:
|
||||
`;
|
||||
@ -38,6 +42,102 @@ type VideoSearchChainInput = {
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
|
||||
async function performVideoSearch(query: string) {
|
||||
const searchEngine = getVideoSearchEngineBackend();
|
||||
const youtubeQuery = `${query} site:youtube.com`;
|
||||
let videos = [];
|
||||
|
||||
switch (searchEngine) {
|
||||
case 'google': {
|
||||
const googleResult = await searchGooglePSE(youtubeQuery);
|
||||
googleResult.results.forEach((result) => {
|
||||
// Use .results instead of .originalres
|
||||
if (result.img_src && result.url && result.title) {
|
||||
const videoId = new URL(result.url).searchParams.get('v');
|
||||
videos.push({
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
iframe_src: videoId
|
||||
? `https://www.youtube.com/embed/${videoId}`
|
||||
: null,
|
||||
});
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'searxng': {
|
||||
const searxResult = await searchSearxng(query, {
|
||||
engines: ['youtube'],
|
||||
});
|
||||
searxResult.results.forEach((result) => {
|
||||
if (
|
||||
result.thumbnail &&
|
||||
result.url &&
|
||||
result.title &&
|
||||
result.iframe_src
|
||||
) {
|
||||
videos.push({
|
||||
img_src: result.thumbnail,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
iframe_src: result.iframe_src,
|
||||
});
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'brave': {
|
||||
const braveResult = await searchBraveAPI(youtubeQuery);
|
||||
braveResult.results.forEach((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
const videoId = new URL(result.url).searchParams.get('v');
|
||||
videos.push({
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
iframe_src: videoId
|
||||
? `https://www.youtube.com/embed/${videoId}`
|
||||
: null,
|
||||
});
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'yacy': {
|
||||
console.log('Not available for yacy');
|
||||
videos = [];
|
||||
break;
|
||||
}
|
||||
|
||||
case 'bing': {
|
||||
const bingResult = await searchBingAPI(youtubeQuery);
|
||||
bingResult.results.forEach((result) => {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
const videoId = new URL(result.url).searchParams.get('v');
|
||||
videos.push({
|
||||
img_src: result.img_src,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
iframe_src: videoId
|
||||
? `https://www.youtube.com/embed/${videoId}`
|
||||
: null,
|
||||
});
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
||||
}
|
||||
|
||||
return videos;
|
||||
}
|
||||
|
||||
const createVideoSearchChain = (llm: BaseChatModel) => {
|
||||
return RunnableSequence.from([
|
||||
RunnableMap.from({
|
||||
@ -52,28 +152,7 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
|
||||
llm,
|
||||
strParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
const res = await searchSearxng(input, {
|
||||
engines: ['youtube'],
|
||||
});
|
||||
|
||||
const videos = [];
|
||||
|
||||
res.results.forEach((result) => {
|
||||
if (
|
||||
result.thumbnail &&
|
||||
result.url &&
|
||||
result.title &&
|
||||
result.iframe_src
|
||||
) {
|
||||
videos.push({
|
||||
img_src: result.thumbnail,
|
||||
url: result.url,
|
||||
title: result.title,
|
||||
iframe_src: result.iframe_src,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const videos = await performVideoSearch(input);
|
||||
return videos.slice(0, 10);
|
||||
}),
|
||||
]);
|
||||
|
src/config.ts (143 lines changed)

@@ -10,15 +10,51 @@ interface Config {
|
||||
SIMILARITY_MEASURE: string;
|
||||
KEEP_ALIVE: string;
|
||||
};
|
||||
API_KEYS: {
|
||||
OPENAI: string;
|
||||
GROQ: string;
|
||||
ANTHROPIC: string;
|
||||
GEMINI: string;
|
||||
SEARCH_ENGINE_BACKENDS: {
|
||||
SEARCH: string;
|
||||
IMAGE: string;
|
||||
VIDEO: string;
|
||||
NEWS: string;
|
||||
};
|
||||
API_ENDPOINTS: {
|
||||
SEARXNG: string;
|
||||
OLLAMA: string;
|
||||
MODELS: {
|
||||
OPENAI: {
|
||||
API_KEY: string;
|
||||
};
|
||||
GROQ: {
|
||||
API_KEY: string;
|
||||
};
|
||||
ANTHROPIC: {
|
||||
API_KEY: string;
|
||||
};
|
||||
GEMINI: {
|
||||
API_KEY: string;
|
||||
};
|
||||
OLLAMA: {
|
||||
API_URL: string;
|
||||
};
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: string;
|
||||
API_KEY: string;
|
||||
MODEL_NAME: string;
|
||||
};
|
||||
};
|
||||
SEARCH_ENGINES: {
|
||||
GOOGLE: {
|
||||
API_KEY: string;
|
||||
CSE_ID: string;
|
||||
};
|
||||
SEARXNG: {
|
||||
ENDPOINT: string;
|
||||
};
|
||||
BING: {
|
||||
SUBSCRIPTION_KEY: string;
|
||||
};
|
||||
BRAVE: {
|
||||
API_KEY: string;
|
||||
};
|
||||
YACY: {
|
||||
ENDPOINT: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@ -38,42 +74,89 @@ export const getSimilarityMeasure = () =>
|
||||
|
||||
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
||||
|
||||
export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
|
||||
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
||||
|
||||
export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
|
||||
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
||||
|
||||
export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
|
||||
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
|
||||
|
||||
export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
|
||||
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
|
||||
|
||||
export const getSearchEngineBackend = () =>
|
||||
loadConfig().SEARCH_ENGINE_BACKENDS.SEARCH;
|
||||
|
||||
export const getImageSearchEngineBackend = () =>
|
||||
loadConfig().SEARCH_ENGINE_BACKENDS.IMAGE || getSearchEngineBackend();
|
||||
|
||||
export const getVideoSearchEngineBackend = () =>
|
||||
loadConfig().SEARCH_ENGINE_BACKENDS.VIDEO || getSearchEngineBackend();
|
||||
|
||||
export const getNewsSearchEngineBackend = () =>
|
||||
loadConfig().SEARCH_ENGINE_BACKENDS.NEWS || getSearchEngineBackend();
|
||||
|
||||
export const getGoogleApiKey = () => loadConfig().SEARCH_ENGINES.GOOGLE.API_KEY;
|
||||
|
||||
export const getGoogleCseId = () => loadConfig().SEARCH_ENGINES.GOOGLE.CSE_ID;
|
||||
|
||||
export const getBraveApiKey = () => loadConfig().SEARCH_ENGINES.BRAVE.API_KEY;
|
||||
|
||||
export const getBingSubscriptionKey = () =>
|
||||
loadConfig().SEARCH_ENGINES.BING.SUBSCRIPTION_KEY;
|
||||
|
||||
export const getYacyJsonEndpoint = () =>
|
||||
loadConfig().SEARCH_ENGINES.YACY.ENDPOINT;
|
||||
|
||||
export const getSearxngApiEndpoint = () =>
|
||||
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
||||
process.env.SEARXNG_API_URL || loadConfig().SEARCH_ENGINES.SEARXNG.ENDPOINT;
|
||||
|
||||
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
||||
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
const currentConfig = loadConfig();
|
||||
export const getCustomOpenaiApiKey = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
|
||||
|
||||
for (const key in currentConfig) {
|
||||
if (!config[key]) config[key] = {};
|
||||
export const getCustomOpenaiApiUrl = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
|
||||
|
||||
if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) {
|
||||
for (const nestedKey in currentConfig[key]) {
|
||||
if (
|
||||
!config[key][nestedKey] &&
|
||||
currentConfig[key][nestedKey] &&
|
||||
config[key][nestedKey] !== ''
|
||||
) {
|
||||
config[key][nestedKey] = currentConfig[key][nestedKey];
|
||||
}
|
||||
export const getCustomOpenaiModelName = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
|
||||
|
||||
const mergeConfigs = (current: any, update: any): any => {
|
||||
if (update === null || update === undefined) {
|
||||
return current;
|
||||
}
|
||||
|
||||
if (typeof current !== 'object' || current === null) {
|
||||
return update;
|
||||
}
|
||||
|
||||
const result = { ...current };
|
||||
|
||||
for (const key in update) {
|
||||
if (Object.prototype.hasOwnProperty.call(update, key)) {
|
||||
const updateValue = update[key];
|
||||
|
||||
if (
|
||||
typeof updateValue === 'object' &&
|
||||
updateValue !== null &&
|
||||
typeof result[key] === 'object' &&
|
||||
result[key] !== null
|
||||
) {
|
||||
result[key] = mergeConfigs(result[key], updateValue);
|
||||
} else if (updateValue !== undefined) {
|
||||
result[key] = updateValue;
|
||||
}
|
||||
} else if (currentConfig[key] && config[key] !== '') {
|
||||
config[key] = currentConfig[key];
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
const currentConfig = loadConfig();
|
||||
const mergedConfig = mergeConfigs(currentConfig, config);
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, `../${configFileName}`),
|
||||
toml.stringify(config),
|
||||
toml.stringify(mergedConfig),
|
||||
);
|
||||
};
|
||||
|
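The rewritten `updateConfig` above now deep-merges the partial update into the existing config via `mergeConfigs` instead of patching keys in place. A minimal sketch of that behaviour, with made-up values:

```typescript
// Hypothetical values; only the shape mirrors the Config interface above.
const current = {
  MODELS: { OPENAI: { API_KEY: 'old-key' }, GROQ: { API_KEY: 'groq-key' } },
  SEARCH_ENGINE_BACKENDS: { SEARCH: 'searxng', IMAGE: 'searxng' },
};
const update = {
  MODELS: { OPENAI: { API_KEY: 'new-key' } }, // only one nested field supplied
  SEARCH_ENGINE_BACKENDS: { IMAGE: 'google' },
};

// mergeConfigs(current, update) recurses into matching objects and keeps
// everything the update does not mention:
//   MODELS.OPENAI.API_KEY         -> 'new-key'
//   MODELS.GROQ.API_KEY           -> 'groq-key'  (preserved)
//   SEARCH_ENGINE_BACKENDS.SEARCH -> 'searxng'   (preserved)
//   SEARCH_ENGINE_BACKENDS.IMAGE  -> 'google'
```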
@@ -4,6 +4,12 @@ import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
import { loadAnthropicChatModels } from './anthropic';
import { loadTransformersEmbeddingsModels } from './transformers';
import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '../../config';
import { ChatOpenAI } from '@langchain/openai';

const chatModelProviders = {
  openai: loadOpenAIChatModels,

@@ -30,7 +36,27 @@ export const getAvailableChatModelProviders = async () => {
    }
  }

  models['custom_openai'] = {};
  const customOpenAiApiKey = getCustomOpenaiApiKey();
  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
  const customOpenAiModelName = getCustomOpenaiModelName();

  models['custom_openai'] = {
    ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
      ? {
          [customOpenAiModelName]: {
            displayName: customOpenAiModelName,
            model: new ChatOpenAI({
              openAIApiKey: customOpenAiApiKey,
              modelName: customOpenAiModelName,
              temperature: 0.7,
              configuration: {
                baseURL: customOpenAiApiUrl,
              },
            }),
          },
        }
      : {}),
  };

  return models;
};
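Downstream code can then pick the registered custom model up from the provider map; a rough sketch follows (the import path and top-level `await` are illustrative, and the `custom_openai` entry is only present when all three config values are set):

```typescript
import { getAvailableChatModelProviders } from './lib/providers';

const providers = await getAvailableChatModelProviders();
const customModels = providers['custom_openai'];   // {} unless key, URL and model name are configured
const entry = Object.values(customModels)[0];      // { displayName, model } when configured

if (entry) {
  const reply = await entry.model.invoke('Hello!'); // standard LangChain chat-model call
  console.log(entry.displayName, reply.content);
}
```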
src/lib/searchEngines/bing.ts (new file, 105 lines)

@@ -0,0 +1,105 @@
|
||||
import axios from 'axios';
|
||||
import { getBingSubscriptionKey } from '../../config';
|
||||
|
||||
interface BingAPISearchResult {
|
||||
_type: string;
|
||||
name: string;
|
||||
url: string;
|
||||
displayUrl: string;
|
||||
snippet?: string;
|
||||
dateLastCrawled?: string;
|
||||
thumbnailUrl?: string;
|
||||
contentUrl?: string;
|
||||
hostPageUrl?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
accentColor?: string;
|
||||
contentSize?: string;
|
||||
datePublished?: string;
|
||||
encodingFormat?: string;
|
||||
hostPageDisplayUrl?: string;
|
||||
id?: string;
|
||||
isLicensed?: boolean;
|
||||
isFamilyFriendly?: boolean;
|
||||
language?: string;
|
||||
mediaUrl?: string;
|
||||
motionThumbnailUrl?: string;
|
||||
publisher?: string;
|
||||
viewCount?: number;
|
||||
webSearchUrl?: string;
|
||||
primaryImageOfPage?: {
|
||||
thumbnailUrl?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
};
|
||||
video?: {
|
||||
allowHttpsEmbed?: boolean;
|
||||
embedHtml?: string;
|
||||
allowMobileEmbed?: boolean;
|
||||
viewCount?: number;
|
||||
duration?: string;
|
||||
};
|
||||
image?: {
|
||||
thumbnail?: {
|
||||
contentUrl?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
};
|
||||
imageInsightsToken?: string;
|
||||
imageId?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export const searchBingAPI = async (query: string) => {
|
||||
try {
|
||||
const bingApiKey = await getBingSubscriptionKey();
|
||||
const url = new URL(`https://api.cognitive.microsoft.com/bing/v7.0/search`);
|
||||
url.searchParams.append('q', query);
|
||||
url.searchParams.append('responseFilter', 'Webpages,Images,Videos');
|
||||
|
||||
const res = await axios.get(url.toString(), {
|
||||
headers: {
|
||||
'Ocp-Apim-Subscription-Key': bingApiKey,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (res.data.error) {
|
||||
throw new Error(`Bing API Error: ${res.data.error.message}`);
|
||||
}
|
||||
|
||||
const originalres = res.data;
|
||||
|
||||
// Extract web, image, and video results
|
||||
const webResults = originalres.webPages?.value || [];
|
||||
const imageResults = originalres.images?.value || [];
|
||||
const videoResults = originalres.videos?.value || [];
|
||||
|
||||
const results = webResults.map((item: BingAPISearchResult) => ({
|
||||
title: item.name,
|
||||
url: item.url,
|
||||
content: item.snippet,
|
||||
img_src:
|
||||
item.primaryImageOfPage?.thumbnailUrl ||
|
||||
imageResults.find((img: any) => img.hostPageUrl === item.url)
|
||||
?.thumbnailUrl ||
|
||||
videoResults.find((vid: any) => vid.hostPageUrl === item.url)
|
||||
?.thumbnailUrl,
|
||||
...(item.video && {
|
||||
videoData: {
|
||||
duration: item.video.duration,
|
||||
embedUrl: item.video.embedHtml?.match(/src="(.*?)"/)?.[1],
|
||||
},
|
||||
publisher: item.publisher,
|
||||
datePublished: item.datePublished,
|
||||
}),
|
||||
}));
|
||||
|
||||
return { results, originalres };
|
||||
} catch (error) {
|
||||
const errorMessage = error.response?.data
|
||||
? JSON.stringify(error.response.data, null, 2)
|
||||
: error.message || 'Unknown error';
|
||||
throw new Error(`Bing API Error: ${errorMessage}`);
|
||||
}
|
||||
};
|
src/lib/searchEngines/brave.ts (new file, 102 lines)

@@ -0,0 +1,102 @@
|
||||
import axios from 'axios';
|
||||
import { getBraveApiKey } from '../../config';
|
||||
|
||||
interface BraveSearchResult {
|
||||
title: string;
|
||||
url: string;
|
||||
content?: string;
|
||||
img_src?: string;
|
||||
age?: string;
|
||||
family_friendly?: boolean;
|
||||
language?: string;
|
||||
video?: {
|
||||
embedUrl?: string;
|
||||
duration?: string;
|
||||
};
|
||||
rating?: {
|
||||
value: number;
|
||||
scale: number;
|
||||
};
|
||||
products?: Array<{
|
||||
name: string;
|
||||
price?: string;
|
||||
}>;
|
||||
recipe?: {
|
||||
ingredients?: string[];
|
||||
cookTime?: string;
|
||||
};
|
||||
meta?: {
|
||||
fetched?: string;
|
||||
lastCrawled?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export const searchBraveAPI = async (
|
||||
query: string,
|
||||
numResults: number = 20,
|
||||
): Promise<{ results: BraveSearchResult[]; originalres: any }> => {
|
||||
try {
|
||||
const braveApiKey = await getBraveApiKey();
|
||||
const url = new URL(`https://api.search.brave.com/res/v1/web/search`);
|
||||
|
||||
url.searchParams.append('q', query);
|
||||
url.searchParams.append('count', numResults.toString());
|
||||
|
||||
const res = await axios.get(url.toString(), {
|
||||
headers: {
|
||||
'X-Subscription-Token': braveApiKey,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (res.data.error) {
|
||||
throw new Error(`Brave API Error: ${res.data.error.message}`);
|
||||
}
|
||||
|
||||
const originalres = res.data;
|
||||
const webResults = originalres.web?.results || [];
|
||||
|
||||
const results: BraveSearchResult[] = webResults.map((item: any) => ({
|
||||
title: item.title,
|
||||
url: item.url,
|
||||
content: item.description,
|
||||
img_src: item.thumbnail?.src || item.deep_results?.images?.[0]?.src,
|
||||
age: item.age,
|
||||
family_friendly: item.family_friendly,
|
||||
language: item.language,
|
||||
video: item.video
|
||||
? {
|
||||
embedUrl: item.video.embed_url,
|
||||
duration: item.video.duration,
|
||||
}
|
||||
: undefined,
|
||||
rating: item.rating
|
||||
? {
|
||||
value: item.rating.value,
|
||||
scale: item.rating.scale_max,
|
||||
}
|
||||
: undefined,
|
||||
products: item.deep_results?.product_cluster?.map((p: any) => ({
|
||||
name: p.name,
|
||||
price: p.price,
|
||||
})),
|
||||
recipe: item.recipe
|
||||
? {
|
||||
ingredients: item.recipe.ingredients,
|
||||
cookTime: item.recipe.cook_time,
|
||||
}
|
||||
: undefined,
|
||||
meta: {
|
||||
fetched: item.meta?.fetched,
|
||||
lastCrawled: item.meta?.last_crawled,
|
||||
},
|
||||
}));
|
||||
|
||||
return { results, originalres };
|
||||
} catch (error) {
|
||||
const errorMessage = error.response?.data
|
||||
? JSON.stringify(error.response.data, null, 2)
|
||||
: error.message || 'Unknown error';
|
||||
throw new Error(`Brave API Error: ${errorMessage}`);
|
||||
}
|
||||
};
|
src/lib/searchEngines/google_pse.ts (new file, 85 lines)

@@ -0,0 +1,85 @@
|
||||
import axios from 'axios';
|
||||
import { getGoogleApiKey, getGoogleCseId } from '../../config';
|
||||
|
||||
interface GooglePSESearchResult {
|
||||
kind: string;
|
||||
title: string;
|
||||
htmlTitle: string;
|
||||
link: string;
|
||||
displayLink: string;
|
||||
snippet?: string;
|
||||
htmlSnippet?: string;
|
||||
cacheId?: string;
|
||||
formattedUrl: string;
|
||||
htmlFormattedUrl: string;
|
||||
pagemap?: {
|
||||
videoobject: any;
|
||||
cse_thumbnail?: Array<{
|
||||
src: string;
|
||||
width: string;
|
||||
height: string;
|
||||
}>;
|
||||
metatags?: Array<{
|
||||
[key: string]: string;
|
||||
author?: string;
|
||||
}>;
|
||||
cse_image?: Array<{
|
||||
src: string;
|
||||
}>;
|
||||
};
|
||||
fileFormat?: string;
|
||||
image?: {
|
||||
contextLink: string;
|
||||
thumbnailLink: string;
|
||||
};
|
||||
mime?: string;
|
||||
labels?: Array<{
|
||||
name: string;
|
||||
displayName: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
export const searchGooglePSE = async (query: string) => {
|
||||
try {
|
||||
const [googleApiKey, googleCseID] = await Promise.all([
|
||||
getGoogleApiKey(),
|
||||
getGoogleCseId(),
|
||||
]);
|
||||
|
||||
const url = new URL(`https://www.googleapis.com/customsearch/v1`);
|
||||
url.searchParams.append('q', query);
|
||||
url.searchParams.append('cx', googleCseID);
|
||||
url.searchParams.append('key', googleApiKey);
|
||||
|
||||
const res = await axios.get(url.toString());
|
||||
|
||||
if (res.data.error) {
|
||||
throw new Error(`Google PSE Error: ${res.data.error.message}`);
|
||||
}
|
||||
|
||||
const originalres = res.data.items;
|
||||
|
||||
const results = originalres.map((item: GooglePSESearchResult) => ({
|
||||
title: item.title,
|
||||
url: item.link,
|
||||
content: item.snippet,
|
||||
img_src:
|
||||
item.pagemap?.cse_image?.[0]?.src ||
|
||||
item.pagemap?.cse_thumbnail?.[0]?.src ||
|
||||
item.image?.thumbnailLink,
|
||||
...(item.pagemap?.videoobject?.[0] && {
|
||||
videoData: {
|
||||
duration: item.pagemap.videoobject[0].duration,
|
||||
embedUrl: item.pagemap.videoobject[0].embedurl,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
return { results, originalres };
|
||||
} catch (error) {
|
||||
const errorMessage = error.response?.data
|
||||
? JSON.stringify(error.response.data, null, 2)
|
||||
: error.message || 'Unknown error';
|
||||
throw new Error(`Google PSE Error: ${errorMessage}`);
|
||||
}
|
||||
};
|
@@ -1,5 +1,5 @@
import axios from 'axios';
import { getSearxngApiEndpoint } from '../config';
import { getSearxngApiEndpoint } from '../../config';

interface SearxngSearchOptions {
  categories?: string[];
src/lib/searchEngines/yacy.ts (new file, 79 lines)

@@ -0,0 +1,79 @@
|
||||
import axios from 'axios';
|
||||
import { getYacyJsonEndpoint } from '../../config';
|
||||
|
||||
interface YaCySearchResult {
|
||||
channels: {
|
||||
title: string;
|
||||
description: string;
|
||||
link: string;
|
||||
image: {
|
||||
url: string;
|
||||
title: string;
|
||||
link: string;
|
||||
};
|
||||
startIndex: string;
|
||||
itemsPerPage: string;
|
||||
searchTerms: string;
|
||||
items: {
|
||||
title: string;
|
||||
link: string;
|
||||
code: string;
|
||||
description: string;
|
||||
pubDate: string;
|
||||
image?: string;
|
||||
size: string;
|
||||
sizename: string;
|
||||
guid: string;
|
||||
faviconUrl: string;
|
||||
host: string;
|
||||
path: string;
|
||||
file: string;
|
||||
urlhash: string;
|
||||
ranking: string;
|
||||
}[];
|
||||
navigation: {
|
||||
facetname: string;
|
||||
displayname: string;
|
||||
type: string;
|
||||
min: string;
|
||||
max: string;
|
||||
mean: string;
|
||||
elements: {
|
||||
name: string;
|
||||
count: string;
|
||||
modifier: string;
|
||||
url: string;
|
||||
}[];
|
||||
}[];
|
||||
}[];
|
||||
}
|
||||
|
||||
export const searchYaCy = async (query: string, numResults: number = 20) => {
|
||||
try {
|
||||
const yacyBaseUrl = getYacyJsonEndpoint();
|
||||
|
||||
const url = new URL(`${yacyBaseUrl}/yacysearch.json`);
|
||||
url.searchParams.append('query', query);
|
||||
url.searchParams.append('count', numResults.toString());
|
||||
|
||||
const res = await axios.get(url.toString());
|
||||
|
||||
const originalres = res.data as YaCySearchResult;
|
||||
|
||||
const results = originalres.channels[0].items.map((item) => ({
|
||||
title: item.title,
|
||||
url: item.link,
|
||||
content: item.description,
|
||||
img_src: item.image || null,
|
||||
pubDate: item.pubDate,
|
||||
host: item.host,
|
||||
}));
|
||||
|
||||
return { results, originalres };
|
||||
} catch (error) {
|
||||
const errorMessage = error.response?.data
|
||||
? JSON.stringify(error.response.data, null, 2)
|
||||
: error.message || 'Unknown error';
|
||||
throw new Error(`YaCy Error: ${errorMessage}`);
|
||||
}
|
||||
};
|
@@ -1,6 +1,5 @@
export const webSearchRetrieverPrompt = `
You are an AI question rephraser. You will be given a conversation and a follow-up question, you will have to rephrase the follow up question so it is a standalone question and can be used by another LLM to search the web for information to answer it.
If you receive a question in a language other than English and it is related to the region, culture, or customs associated with that language, process the question in the original language. Otherwise, translate it into English and proceed with the following instructions.
If it is a smple writing task or a greeting (unless the greeting contains a question after it) like Hi, Hello, How are you, etc. than a question then you need to return \`not_needed\` as the response (This is because the LLM won't need to search the web for finding information on this topic).
If the user asks some question from some URL or wants you to summarize a PDF or a webpage (via URL) you need to return the links inside the \`links\` XML block and the question inside the \`question\` XML block. If the user wants to you to summarize the webpage or the PDF you need to return \`summarize\` inside the \`question\` XML block in place of a question and the link to summarize in the \`links\` XML block.
You must always return the rephrased question inside the \`question\` XML block, if there are no links in the follow-up question then don't insert a \`links\` XML block in your response.
@ -10,6 +10,19 @@ import {
|
||||
getGeminiApiKey,
|
||||
getOpenaiApiKey,
|
||||
updateConfig,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiModelName,
|
||||
getSearchEngineBackend,
|
||||
getImageSearchEngineBackend,
|
||||
getVideoSearchEngineBackend,
|
||||
getNewsSearchEngineBackend,
|
||||
getSearxngApiEndpoint,
|
||||
getGoogleApiKey,
|
||||
getGoogleCseId,
|
||||
getBingSubscriptionKey,
|
||||
getBraveApiKey,
|
||||
getYacyJsonEndpoint,
|
||||
} from '../config';
|
||||
import logger from '../utils/logger';
|
||||
|
||||
@ -54,6 +67,24 @@ router.get('/', async (_, res) => {
|
||||
config['anthropicApiKey'] = getAnthropicApiKey();
|
||||
config['groqApiKey'] = getGroqApiKey();
|
||||
config['geminiApiKey'] = getGeminiApiKey();
|
||||
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
|
||||
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
|
||||
config['customOpenaiModelName'] = getCustomOpenaiModelName();
|
||||
|
||||
// Add search engine configuration
|
||||
config['searchEngineBackends'] = {
|
||||
search: getSearchEngineBackend(),
|
||||
image: getImageSearchEngineBackend(),
|
||||
video: getVideoSearchEngineBackend(),
|
||||
news: getNewsSearchEngineBackend(),
|
||||
};
|
||||
|
||||
config['searxngEndpoint'] = getSearxngApiEndpoint();
|
||||
config['googleApiKey'] = getGoogleApiKey();
|
||||
config['googleCseId'] = getGoogleCseId();
|
||||
config['bingSubscriptionKey'] = getBingSubscriptionKey();
|
||||
config['braveApiKey'] = getBraveApiKey();
|
||||
config['yacyEndpoint'] = getYacyJsonEndpoint();
|
||||
|
||||
res.status(200).json(config);
|
||||
} catch (err: any) {
|
||||
@ -66,14 +97,51 @@ router.post('/', async (req, res) => {
|
||||
const config = req.body;
|
||||
|
||||
const updatedConfig = {
|
||||
API_KEYS: {
|
||||
OPENAI: config.openaiApiKey,
|
||||
GROQ: config.groqApiKey,
|
||||
ANTHROPIC: config.anthropicApiKey,
|
||||
GEMINI: config.geminiApiKey,
|
||||
MODELS: {
|
||||
OPENAI: {
|
||||
API_KEY: config.openaiApiKey,
|
||||
},
|
||||
GROQ: {
|
||||
API_KEY: config.groqApiKey,
|
||||
},
|
||||
ANTHROPIC: {
|
||||
API_KEY: config.anthropicApiKey,
|
||||
},
|
||||
GEMINI: {
|
||||
API_KEY: config.geminiApiKey,
|
||||
},
|
||||
OLLAMA: {
|
||||
API_URL: config.ollamaApiUrl,
|
||||
},
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: config.customOpenaiApiUrl,
|
||||
API_KEY: config.customOpenaiApiKey,
|
||||
MODEL_NAME: config.customOpenaiModelName,
|
||||
},
|
||||
},
|
||||
API_ENDPOINTS: {
|
||||
OLLAMA: config.ollamaApiUrl,
|
||||
SEARCH_ENGINE_BACKENDS: config.searchEngineBackends ? {
|
||||
SEARCH: config.searchEngineBackends.search,
|
||||
IMAGE: config.searchEngineBackends.image,
|
||||
VIDEO: config.searchEngineBackends.video,
|
||||
NEWS: config.searchEngineBackends.news,
|
||||
} : undefined,
|
||||
SEARCH_ENGINES: {
|
||||
GOOGLE: {
|
||||
API_KEY: config.googleApiKey,
|
||||
CSE_ID: config.googleCseId,
|
||||
},
|
||||
SEARXNG: {
|
||||
ENDPOINT: config.searxngEndpoint,
|
||||
},
|
||||
BING: {
|
||||
SUBSCRIPTION_KEY: config.bingSubscriptionKey,
|
||||
},
|
||||
BRAVE: {
|
||||
API_KEY: config.braveApiKey,
|
||||
},
|
||||
YACY: {
|
||||
ENDPOINT: config.yacyEndpoint,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -1,42 +1,125 @@
|
||||
import express from 'express';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
||||
import { searchYaCy } from '../lib/searchEngines/yacy';
|
||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
||||
import { getNewsSearchEngineBackend } from '../config';
|
||||
import logger from '../utils/logger';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
async function performSearch(query: string, site: string) {
|
||||
const searchEngine = getNewsSearchEngineBackend();
|
||||
switch (searchEngine) {
|
||||
case 'google': {
|
||||
const googleResult = await searchGooglePSE(query);
|
||||
|
||||
return googleResult.originalres.map((item) => {
|
||||
const imageSources = [
|
||||
item.pagemap?.cse_image?.[0]?.src,
|
||||
item.pagemap?.cse_thumbnail?.[0]?.src,
|
||||
item.pagemap?.metatags?.[0]?.['og:image'],
|
||||
item.pagemap?.metatags?.[0]?.['twitter:image'],
|
||||
item.pagemap?.metatags?.[0]?.['image'],
|
||||
].filter(Boolean); // Remove undefined values
|
||||
|
||||
return {
|
||||
title: item.title,
|
||||
url: item.link,
|
||||
content: item.snippet,
|
||||
thumbnail: imageSources[0], // First available image
|
||||
img_src: imageSources[0], // Same as thumbnail for consistency
|
||||
iframe_src: null,
|
||||
author: item.pagemap?.metatags?.[0]?.['og:site_name'] || site,
|
||||
publishedDate:
|
||||
item.pagemap?.metatags?.[0]?.['article:published_time'],
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
case 'searxng': {
|
||||
const searxResult = await searchSearxng(query, {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
});
|
||||
return searxResult.results;
|
||||
}
|
||||
|
||||
case 'brave': {
|
||||
const braveResult = await searchBraveAPI(query);
|
||||
return braveResult.results.map((item) => ({
|
||||
title: item.title,
|
||||
url: item.url,
|
||||
content: item.content,
|
||||
thumbnail: item.img_src,
|
||||
img_src: item.img_src,
|
||||
iframe_src: null,
|
||||
author: item.meta?.fetched || site,
|
||||
publishedDate: item.meta?.lastCrawled,
|
||||
}));
|
||||
}
|
||||
|
||||
case 'yacy': {
|
||||
const yacyResult = await searchYaCy(query);
|
||||
return yacyResult.results.map((item) => ({
|
||||
title: item.title,
|
||||
url: item.url,
|
||||
content: item.content,
|
||||
thumbnail: item.img_src,
|
||||
img_src: item.img_src,
|
||||
iframe_src: null,
|
||||
author: item?.host || site,
|
||||
publishedDate: item?.pubDate,
|
||||
}));
|
||||
}
|
||||
|
||||
case 'bing': {
|
||||
const bingResult = await searchBingAPI(query);
|
||||
return bingResult.results.map((item) => ({
|
||||
title: item.title,
|
||||
url: item.url,
|
||||
content: item.content,
|
||||
thumbnail: item.img_src,
|
||||
img_src: item.img_src,
|
||||
iframe_src: null,
|
||||
author: item?.publisher || site,
|
||||
publishedDate: item?.datePublished,
|
||||
}));
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
||||
}
|
||||
}
|
||||
|
||||
router.get('/', async (req, res) => {
|
||||
try {
|
||||
const queries = [
|
||||
{ site: 'businessinsider.com', topic: 'AI' },
|
||||
{ site: 'www.exchangewire.com', topic: 'AI' },
|
||||
{ site: 'yahoo.com', topic: 'AI' },
|
||||
{ site: 'businessinsider.com', topic: 'tech' },
|
||||
{ site: 'www.exchangewire.com', topic: 'tech' },
|
||||
{ site: 'yahoo.com', topic: 'tech' },
|
||||
];
|
||||
|
||||
const data = (
|
||||
await Promise.all([
|
||||
searchSearxng('site:businessinsider.com AI', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
await Promise.all(
|
||||
queries.map(async ({ site, topic }) => {
|
||||
try {
|
||||
const query = `site:${site} ${topic}`;
|
||||
return await performSearch(query, site);
|
||||
} catch (error) {
|
||||
logger.error(`Error searching ${site}: ${error.message}`);
|
||||
return [];
|
||||
}
|
||||
}),
|
||||
searchSearxng('site:www.exchangewire.com AI', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
}),
|
||||
searchSearxng('site:yahoo.com AI', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
}),
|
||||
searchSearxng('site:businessinsider.com tech', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
}),
|
||||
searchSearxng('site:www.exchangewire.com tech', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
}),
|
||||
searchSearxng('site:yahoo.com tech', {
|
||||
engines: ['bing news'],
|
||||
pageno: 1,
|
||||
}),
|
||||
])
|
||||
)
|
||||
)
|
||||
.map((result) => result.results)
|
||||
.flat()
|
||||
.sort(() => Math.random() - 0.5);
|
||||
.sort(() => Math.random() - 0.5)
|
||||
.filter((item) => item.title && item.url && item.content);
|
||||
|
||||
return res.json({ blogs: data });
|
||||
} catch (err: any) {
|
||||
|
@ -5,14 +5,17 @@ import { getAvailableChatModelProviders } from '../lib/providers';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '../config';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
customOpenAIKey?: string;
|
||||
}
|
||||
|
||||
interface ImageSearchBody {
|
||||
@ -44,21 +47,12 @@ router.post('/', async (req, res) => {
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
if (
|
||||
!body.chatModel?.customOpenAIBaseURL ||
|
||||
!body.chatModel?.customOpenAIKey
|
||||
) {
|
||||
return res
|
||||
.status(400)
|
||||
.json({ message: 'Missing custom OpenAI base URL or key' });
|
||||
}
|
||||
|
||||
llm = new ChatOpenAI({
|
||||
modelName: body.chatModel.model,
|
||||
openAIApiKey: body.chatModel.customOpenAIKey,
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
openAIApiKey: getCustomOpenaiApiKey(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: body.chatModel.customOpenAIBaseURL,
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (
|
||||
|
@ -10,14 +10,19 @@ import {
|
||||
import { searchHandlers } from '../websocket/messageHandler';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { MetaSearchAgentType } from '../search/metaSearchAgent';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '../config';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
interface chatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
customOpenAIKey?: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
}
|
||||
|
||||
interface embeddingModel {
|
||||
@ -78,21 +83,14 @@ router.post('/', async (req, res) => {
|
||||
let embeddings: Embeddings | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
if (
|
||||
!body.chatModel?.customOpenAIBaseURL ||
|
||||
!body.chatModel?.customOpenAIKey
|
||||
) {
|
||||
return res
|
||||
.status(400)
|
||||
.json({ message: 'Missing custom OpenAI base URL or key' });
|
||||
}
|
||||
|
||||
llm = new ChatOpenAI({
|
||||
modelName: body.chatModel.model,
|
||||
openAIApiKey: body.chatModel.customOpenAIKey,
|
||||
modelName: body.chatModel?.model || getCustomOpenaiModelName(),
|
||||
openAIApiKey:
|
||||
body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: body.chatModel.customOpenAIBaseURL,
|
||||
baseURL:
|
||||
body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (
|
||||
|
@ -5,14 +5,17 @@ import { getAvailableChatModelProviders } from '../lib/providers';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '../config';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
customOpenAIKey?: string;
|
||||
}
|
||||
|
||||
interface SuggestionsBody {
|
||||
@ -43,21 +46,12 @@ router.post('/', async (req, res) => {
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
if (
|
||||
!body.chatModel?.customOpenAIBaseURL ||
|
||||
!body.chatModel?.customOpenAIKey
|
||||
) {
|
||||
return res
|
||||
.status(400)
|
||||
.json({ message: 'Missing custom OpenAI base URL or key' });
|
||||
}
|
||||
|
||||
llm = new ChatOpenAI({
|
||||
modelName: body.chatModel.model,
|
||||
openAIApiKey: body.chatModel.customOpenAIKey,
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
openAIApiKey: getCustomOpenaiApiKey(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: body.chatModel.customOpenAIBaseURL,
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (
|
||||
|
@ -5,14 +5,17 @@ import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import handleVideoSearch from '../chains/videoSearchAgent';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '../config';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
customOpenAIKey?: string;
|
||||
}
|
||||
|
||||
interface VideoSearchBody {
|
||||
@ -44,21 +47,12 @@ router.post('/', async (req, res) => {
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
if (
|
||||
!body.chatModel?.customOpenAIBaseURL ||
|
||||
!body.chatModel?.customOpenAIKey
|
||||
) {
|
||||
return res
|
||||
.status(400)
|
||||
.json({ message: 'Missing custom OpenAI base URL or key' });
|
||||
}
|
||||
|
||||
llm = new ChatOpenAI({
|
||||
modelName: body.chatModel.model,
|
||||
openAIApiKey: body.chatModel.customOpenAIKey,
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
openAIApiKey: getCustomOpenaiApiKey(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: body.chatModel.customOpenAIBaseURL,
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (
|
||||
|
@@ -17,7 +17,12 @@ import LineListOutputParser from '../lib/outputParsers/listLineOutputParser';
import LineOutputParser from '../lib/outputParsers/lineOutputParser';
import { getDocumentsFromLinks } from '../utils/documents';
import { Document } from 'langchain/document';
import { searchSearxng } from '../lib/searxng';
import { searchSearxng } from '../lib/searchEngines/searxng';
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
import { searchBingAPI } from '../lib/searchEngines/bing';
import { searchBraveAPI } from '../lib/searchEngines/brave';
import { searchYaCy } from '../lib/searchEngines/yacy';
import { getSearchEngineBackend } from '../config';
import path from 'path';
import fs from 'fs';
import computeSimilarity from '../utils/computeSimilarity';

@@ -132,7 +137,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
    You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the
    text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
    If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.

    - **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
    - **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
    - **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.

@@ -203,10 +208,37 @@ class MetaSearchAgent implements MetaSearchAgentType {

        return { query: question, docs: docs };
      } else {
        const res = await searchSearxng(question, {
          language: 'en',
          engines: this.config.activeEngines,
        });
        const searchEngine = getSearchEngineBackend();

        let res;
        switch (searchEngine) {
          case 'searxng':
            res = await searchSearxng(question, {
              language: 'en',
              engines: this.config.activeEngines,
            });
            break;
          case 'google':
            res = await searchGooglePSE(question);
            break;
          case 'bing':
            res = await searchBingAPI(question);
            break;
          case 'brave':
            res = await searchBraveAPI(question);
            break;
          case 'yacy':
            res = await searchYaCy(question);
            break;
          default:
            throw new Error(`Unknown search engine ${searchEngine}`);
        }

        if (!res?.results) {
          throw new Error(
            `No results found for search engine: ${searchEngine}`,
          );
        }

        const documents = res.results.map(
          (result) =>
@@ -9,6 +9,11 @@ import type { Embeddings } from '@langchain/core/embeddings';
import type { IncomingMessage } from 'http';
import logger from '../utils/logger';
import { ChatOpenAI } from '@langchain/openai';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '../config';

export const handleConnection = async (
  ws: WebSocket,

@@ -48,14 +53,20 @@ export const handleConnection = async (
      llm = chatModelProviders[chatModelProvider][chatModel]
        .model as unknown as BaseChatModel | undefined;
    } else if (chatModelProvider == 'custom_openai') {
      llm = new ChatOpenAI({
        modelName: chatModel,
        openAIApiKey: searchParams.get('openAIApiKey'),
        temperature: 0.7,
        configuration: {
          baseURL: searchParams.get('openAIBaseURL'),
        },
      }) as unknown as BaseChatModel;
      const customOpenaiApiKey = getCustomOpenaiApiKey();
      const customOpenaiApiUrl = getCustomOpenaiApiUrl();
      const customOpenaiModelName = getCustomOpenaiModelName();

      if (customOpenaiApiKey && customOpenaiApiUrl && customOpenaiModelName) {
        llm = new ChatOpenAI({
          modelName: customOpenaiModelName,
          openAIApiKey: customOpenaiApiKey,
          temperature: 0.7,
          configuration: {
            baseURL: customOpenaiApiUrl,
          },
        }) as unknown as BaseChatModel;
      }
    }

    if (
ui/app/settings/page.tsx (new file, 1061 lines; diff suppressed because it is too large)
@ -10,7 +10,7 @@ import { toast } from 'sonner';
|
||||
import { useSearchParams } from 'next/navigation';
|
||||
import { getSuggestions } from '@/lib/actions';
|
||||
import { Settings } from 'lucide-react';
|
||||
import SettingsDialog from './SettingsDialog';
|
||||
import Link from 'next/link';
|
||||
import NextError from 'next/error';
|
||||
|
||||
export type Message = {
|
||||
@ -40,6 +40,7 @@ const useSocket = (
  const isCleaningUpRef = useRef(false);
  const MAX_RETRIES = 3;
  const INITIAL_BACKOFF = 1000; // 1 second
  const isConnectionErrorRef = useRef(false);

  const getBackoffDelay = (retryCount: number) => {
    return Math.min(INITIAL_BACKOFF * Math.pow(2, retryCount), 10000); // Cap at 10 seconds
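With `INITIAL_BACKOFF = 1000` ms and `MAX_RETRIES = 3`, the helper above yields delays of 1 s, 2 s and 4 s (later attempts would be capped at 10 s). A minimal sketch of how a reconnect loop might consume it; this is illustrative and not the component's actual retry code.

```ts
const INITIAL_BACKOFF = 1000; // 1 second
const MAX_RETRIES = 3;

const getBackoffDelay = (retryCount: number) =>
  Math.min(INITIAL_BACKOFF * Math.pow(2, retryCount), 10000); // cap at 10 s

async function reconnectWithBackoff(connect: () => Promise<void>) {
  for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
    try {
      return await connect();
    } catch {
      // wait 1 s, 2 s, 4 s before the next attempt
      await new Promise((resolve) => setTimeout(resolve, getBackoffDelay(attempt)));
    }
  }
  throw new Error('Max retries reached');
}
```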
@ -58,14 +59,17 @@ const useSocket = (
      let embeddingModelProvider = localStorage.getItem(
        'embeddingModelProvider',
      );
      let openAIBaseURL =
        chatModelProvider === 'custom_openai'
          ? localStorage.getItem('openAIBaseURL')
          : null;
      let openAIPIKey =
        chatModelProvider === 'custom_openai'
          ? localStorage.getItem('openAIApiKey')
          : null;

      const autoImageSearch = localStorage.getItem('autoImageSearch');
      const autoVideoSearch = localStorage.getItem('autoVideoSearch');

      if (!autoImageSearch) {
        localStorage.setItem('autoImageSearch', 'true');
      }

      if (!autoVideoSearch) {
        localStorage.setItem('autoVideoSearch', 'false');
      }

      const providers = await fetch(
        `${process.env.NEXT_PUBLIC_API_URL}/models`,
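The hunk above seeds `autoImageSearch` and `autoVideoSearch` in `localStorage` the first time the hook runs. A small sketch of the same "write a default if the key is missing" pattern, kept generic for illustration:

```ts
// Illustrative helper; not part of the diff above.
const ensureDefault = (key: string, value: string) => {
  if (localStorage.getItem(key) === null) {
    localStorage.setItem(key, value);
  }
};

ensureDefault('autoImageSearch', 'true');
ensureDefault('autoVideoSearch', 'false');
```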
@ -94,21 +98,13 @@ const useSocket = (
          chatModelProvider =
            chatModelProvider || Object.keys(chatModelProviders)[0];

          if (chatModelProvider === 'custom_openai') {
            toast.error(
              'Seems like you are using the custom OpenAI provider, please open the settings and enter a model name to use.',
            );
            setError(true);
            return;
          } else {
            chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
          chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];

            if (
              !chatModelProviders ||
              Object.keys(chatModelProviders).length === 0
            )
              return toast.error('No chat models available');
          }
          if (
            !chatModelProviders ||
            Object.keys(chatModelProviders).length === 0
          )
            return toast.error('No chat models available');
        }

        if (!embeddingModel || !embeddingModelProvider) {
@ -139,9 +135,7 @@ const useSocket = (

        if (
          Object.keys(chatModelProviders).length > 0 &&
          (((!openAIBaseURL || !openAIPIKey) &&
            chatModelProvider === 'custom_openai') ||
            !chatModelProviders[chatModelProvider])
          !chatModelProviders[chatModelProvider]
        ) {
          const chatModelProvidersKeys = Object.keys(chatModelProviders);
          chatModelProvider =
@ -149,23 +143,11 @@ const useSocket = (
              (key) => Object.keys(chatModelProviders[key]).length > 0,
            ) || chatModelProvidersKeys[0];

          if (
            chatModelProvider === 'custom_openai' &&
            (!openAIBaseURL || !openAIPIKey)
          ) {
            toast.error(
              'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
            );
            setError(true);
            return;
          }

          localStorage.setItem('chatModelProvider', chatModelProvider);
        }

        if (
          chatModelProvider &&
          (!openAIBaseURL || !openAIPIKey) &&
          !chatModelProviders[chatModelProvider][chatModel]
        ) {
          chatModel = Object.keys(
@ -251,6 +233,8 @@ const useSocket = (
            console.debug(new Date(), 'ws:connected');
          }
          if (data.type === 'error') {
            isConnectionErrorRef.current = true;
            setError(true);
            toast.error(data.data);
          }
        });
@ -265,7 +249,7 @@ const useSocket = (
          clearTimeout(timeoutId);
          setIsWSReady(false);
          console.debug(new Date(), 'ws:disconnected');
          if (!isCleaningUpRef.current) {
          if (!isCleaningUpRef.current && !isConnectionErrorRef.current) {
            toast.error('Connection lost. Attempting to reconnect...');
            attemptReconnect();
          }
@ -595,6 +579,17 @@ const ChatWindow = ({ id }: { id?: string }) => {
          }),
        );
      }

      const autoImageSearch = localStorage.getItem('autoImageSearch');
      const autoVideoSearch = localStorage.getItem('autoVideoSearch');

      if (autoImageSearch === 'true') {
        document.getElementById('search-images')?.click();
      }

      if (autoVideoSearch === 'true') {
        document.getElementById('search-videos')?.click();
      }
    }
  };

@ -629,17 +624,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
    return (
      <div className="relative">
        <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
          <Settings
            className="cursor-pointer lg:hidden"
            onClick={() => setIsSettingsOpen(true)}
          />
          <Link href="/settings">
            <Settings className="cursor-pointer lg:hidden" />
          </Link>
        </div>
        <div className="flex flex-col items-center justify-center min-h-screen">
          <p className="dark:text-white/70 text-black/70 text-sm">
            Failed to connect to the server. Please try again later.
          </p>
        </div>
        <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
      </div>
    );
  }
@ -1,8 +1,8 @@
import { Settings } from 'lucide-react';
import EmptyChatMessageInput from './EmptyChatMessageInput';
import SettingsDialog from './SettingsDialog';
import { useState } from 'react';
import { File } from './ChatWindow';
import Link from 'next/link';

const EmptyChat = ({
  sendMessage,
@ -29,12 +29,10 @@ const EmptyChat = ({

  return (
    <div className="relative">
      <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
      <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
        <Settings
          className="cursor-pointer lg:hidden"
          onClick={() => setIsSettingsOpen(true)}
        />
        <Link href="/settings">
          <Settings className="cursor-pointer lg:hidden" />
        </Link>
      </div>
      <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
        <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
@ -68,7 +68,13 @@ const MessageBox = ({
  return (
    <div>
      {message.role === 'user' && (
        <div className={cn('w-full', messageIndex === 0 ? 'pt-16' : 'pt-8')}>
        <div
          className={cn(
            'w-full',
            messageIndex === 0 ? 'pt-16' : 'pt-8',
            'break-words',
          )}
        >
          <h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
            {message.content}
          </h2>
@ -110,7 +110,7 @@ const Attach = ({
      <button
        type="button"
        onClick={() => fileInputRef.current.click()}
        className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
        className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
      >
        <input
          type="file"
@ -128,7 +128,7 @@ const Attach = ({
          setFiles([]);
          setFileIds([]);
        }}
        className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
        className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
      >
        <Trash size={14} />
        <p className="text-xs">Clear</p>
@ -145,7 +145,7 @@ const Attach = ({
          <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
            <File size={16} className="text-white/70" />
          </div>
          <p className="text-white/70 text-sm">
          <p className="text-black/70 dark:text-white/70 text-sm">
            {file.fileName.length > 25
              ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                '...' +
@ -82,7 +82,7 @@ const AttachSmall = ({
      <button
        type="button"
        onClick={() => fileInputRef.current.click()}
        className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
        className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
      >
        <input
          type="file"
@ -100,7 +100,7 @@ const AttachSmall = ({
          setFiles([]);
          setFileIds([]);
        }}
        className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
        className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
      >
        <Trash size={14} />
        <p className="text-xs">Clear</p>
@ -117,7 +117,7 @@ const AttachSmall = ({
          <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
            <File size={16} className="text-white/70" />
          </div>
          <p className="text-white/70 text-sm">
          <p className="text-black/70 dark:text-white/70 text-sm">
            {file.fileName.length > 25
              ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                '...' +
@ -27,6 +27,7 @@ const SearchImages = ({
    <>
      {!loading && images === null && (
        <button
          id="search-images"
          onClick={async () => {
            setLoading(true);
@ -42,6 +42,7 @@ const Searchvideos = ({
    <>
      {!loading && videos === null && (
        <button
          id="search-videos"
          onClick={async () => {
            setLoading(true);
@ -1,528 +0,0 @@
import { cn } from '@/lib/utils';
import {
  Dialog,
  DialogPanel,
  DialogTitle,
  Transition,
  TransitionChild,
} from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, {
  Fragment,
  useEffect,
  useState,
  type SelectHTMLAttributes,
} from 'react';
import ThemeSwitcher from './theme/Switcher';

interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}

const Input = ({ className, ...restProps }: InputProps) => {
  return (
    <input
      {...restProps}
      className={cn(
        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
        className,
      )}
    />
  );
};

interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
  options: { value: string; label: string; disabled?: boolean }[];
}

export const Select = ({ className, options, ...restProps }: SelectProps) => {
  return (
    <select
      {...restProps}
      className={cn(
        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
        className,
      )}
    >
      {options.map(({ label, value, disabled }) => {
        return (
          <option key={value} value={value} disabled={disabled}>
            {label}
          </option>
        );
      })}
    </select>
  );
};

interface SettingsType {
  chatModelProviders: {
    [key: string]: [Record<string, any>];
  };
  embeddingModelProviders: {
    [key: string]: [Record<string, any>];
  };
  openaiApiKey: string;
  groqApiKey: string;
  anthropicApiKey: string;
  geminiApiKey: string;
  ollamaApiUrl: string;
}

const SettingsDialog = ({
  isOpen,
  setIsOpen,
}: {
  isOpen: boolean;
  setIsOpen: (isOpen: boolean) => void;
}) => {
  const [config, setConfig] = useState<SettingsType | null>(null);
  const [chatModels, setChatModels] = useState<Record<string, any>>({});
  const [embeddingModels, setEmbeddingModels] = useState<Record<string, any>>(
    {},
  );
  const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
    string | null
  >(null);
  const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
    null,
  );
  const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
    useState<string | null>(null);
  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
    string | null
  >(null);
  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
  const [isLoading, setIsLoading] = useState(false);
  const [isUpdating, setIsUpdating] = useState(false);

  useEffect(() => {
    if (isOpen) {
      const fetchConfig = async () => {
        setIsLoading(true);
        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
          headers: {
            'Content-Type': 'application/json',
          },
        });

        const data = (await res.json()) as SettingsType;
        setConfig(data);

        const chatModelProvidersKeys = Object.keys(
          data.chatModelProviders || {},
        );
        const embeddingModelProvidersKeys = Object.keys(
          data.embeddingModelProviders || {},
        );

        const defaultChatModelProvider =
          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
        const defaultEmbeddingModelProvider =
          embeddingModelProvidersKeys.length > 0
            ? embeddingModelProvidersKeys[0]
            : '';

        const chatModelProvider =
          localStorage.getItem('chatModelProvider') ||
          defaultChatModelProvider ||
          '';
        const chatModel =
          localStorage.getItem('chatModel') ||
          (data.chatModelProviders &&
          data.chatModelProviders[chatModelProvider]?.length > 0
            ? data.chatModelProviders[chatModelProvider][0].name
            : undefined) ||
          '';
        const embeddingModelProvider =
          localStorage.getItem('embeddingModelProvider') ||
          defaultEmbeddingModelProvider ||
          '';
        const embeddingModel =
          localStorage.getItem('embeddingModel') ||
          (data.embeddingModelProviders &&
            data.embeddingModelProviders[embeddingModelProvider]?.[0].name) ||
          '';

        setSelectedChatModelProvider(chatModelProvider);
        setSelectedChatModel(chatModel);
        setSelectedEmbeddingModelProvider(embeddingModelProvider);
        setSelectedEmbeddingModel(embeddingModel);
        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
        setChatModels(data.chatModelProviders || {});
        setEmbeddingModels(data.embeddingModelProviders || {});
        setIsLoading(false);
      };

      fetchConfig();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isOpen]);

  const handleSubmit = async () => {
    setIsUpdating(true);

    try {
      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(config),
      });

      localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
      localStorage.setItem('chatModel', selectedChatModel!);
      localStorage.setItem(
        'embeddingModelProvider',
        selectedEmbeddingModelProvider!,
      );
      localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
      localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
      localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
    } catch (err) {
      console.log(err);
    } finally {
      setIsUpdating(false);
      setIsOpen(false);

      window.location.reload();
    }
  };

  return (
    <Transition appear show={isOpen} as={Fragment}>
      <Dialog
        as="div"
        className="relative z-50"
        onClose={() => setIsOpen(false)}
      >
        <TransitionChild
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
        </TransitionChild>
        <div className="fixed inset-0 overflow-y-auto">
          <div className="flex min-h-full items-center justify-center p-4 text-center">
            <TransitionChild
              as={Fragment}
              enter="ease-out duration-200"
              enterFrom="opacity-0 scale-95"
              enterTo="opacity-100 scale-100"
              leave="ease-in duration-100"
              leaveFrom="opacity-100 scale-200"
              leaveTo="opacity-0 scale-95"
            >
              <DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
                <DialogTitle className="text-xl font-medium leading-6 dark:text-white">
                  Settings
                </DialogTitle>
                {config && !isLoading && (
                  <div className="flex flex-col space-y-4 mt-6">
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Theme
                      </p>
                      <ThemeSwitcher />
                    </div>
                    {config.chatModelProviders && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Chat model Provider
                        </p>
                        <Select
                          value={selectedChatModelProvider ?? undefined}
                          onChange={(e) => {
                            setSelectedChatModelProvider(e.target.value);
                            if (e.target.value === 'custom_openai') {
                              setSelectedChatModel('');
                            } else {
                              setSelectedChatModel(
                                config.chatModelProviders[e.target.value][0]
                                  .name,
                              );
                            }
                          }}
                          options={Object.keys(config.chatModelProviders).map(
                            (provider) => ({
                              value: provider,
                              label:
                                provider.charAt(0).toUpperCase() +
                                provider.slice(1),
                            }),
                          )}
                        />
                      </div>
                    )}
                    {selectedChatModelProvider &&
                      selectedChatModelProvider != 'custom_openai' && (
                        <div className="flex flex-col space-y-1">
                          <p className="text-black/70 dark:text-white/70 text-sm">
                            Chat Model
                          </p>
                          <Select
                            value={selectedChatModel ?? undefined}
                            onChange={(e) =>
                              setSelectedChatModel(e.target.value)
                            }
                            options={(() => {
                              const chatModelProvider =
                                config.chatModelProviders[
                                  selectedChatModelProvider
                                ];

                              return chatModelProvider
                                ? chatModelProvider.length > 0
                                  ? chatModelProvider.map((model) => ({
                                      value: model.name,
                                      label: model.displayName,
                                    }))
                                  : [
                                      {
                                        value: '',
                                        label: 'No models available',
                                        disabled: true,
                                      },
                                    ]
                                : [
                                    {
                                      value: '',
                                      label:
                                        'Invalid provider, please check backend logs',
                                      disabled: true,
                                    },
                                  ];
                            })()}
                          />
                        </div>
                      )}
                    {selectedChatModelProvider &&
                      selectedChatModelProvider === 'custom_openai' && (
                        <>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Model name
                            </p>
                            <Input
                              type="text"
                              placeholder="Model name"
                              defaultValue={selectedChatModel!}
                              onChange={(e) =>
                                setSelectedChatModel(e.target.value)
                              }
                            />
                          </div>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Custom OpenAI API Key
                            </p>
                            <Input
                              type="text"
                              placeholder="Custom OpenAI API Key"
                              defaultValue={customOpenAIApiKey!}
                              onChange={(e) =>
                                setCustomOpenAIApiKey(e.target.value)
                              }
                            />
                          </div>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Custom OpenAI Base URL
                            </p>
                            <Input
                              type="text"
                              placeholder="Custom OpenAI Base URL"
                              defaultValue={customOpenAIBaseURL!}
                              onChange={(e) =>
                                setCustomOpenAIBaseURL(e.target.value)
                              }
                            />
                          </div>
                        </>
                      )}
                    {/* Embedding models */}
                    {config.embeddingModelProviders && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Embedding model Provider
                        </p>
                        <Select
                          value={selectedEmbeddingModelProvider ?? undefined}
                          onChange={(e) => {
                            setSelectedEmbeddingModelProvider(e.target.value);
                            setSelectedEmbeddingModel(
                              config.embeddingModelProviders[e.target.value][0]
                                .name,
                            );
                          }}
                          options={Object.keys(
                            config.embeddingModelProviders,
                          ).map((provider) => ({
                            label:
                              provider.charAt(0).toUpperCase() +
                              provider.slice(1),
                            value: provider,
                          }))}
                        />
                      </div>
                    )}
                    {selectedEmbeddingModelProvider && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Embedding Model
                        </p>
                        <Select
                          value={selectedEmbeddingModel ?? undefined}
                          onChange={(e) =>
                            setSelectedEmbeddingModel(e.target.value)
                          }
                          options={(() => {
                            const embeddingModelProvider =
                              config.embeddingModelProviders[
                                selectedEmbeddingModelProvider
                              ];

                            return embeddingModelProvider
                              ? embeddingModelProvider.length > 0
                                ? embeddingModelProvider.map((model) => ({
                                    label: model.displayName,
                                    value: model.name,
                                  }))
                                : [
                                    {
                                      label: 'No embedding models available',
                                      value: '',
                                      disabled: true,
                                    },
                                  ]
                              : [
                                  {
                                    label:
                                      'Invalid provider, please check backend logs',
                                    value: '',
                                    disabled: true,
                                  },
                                ];
                          })()}
                        />
                      </div>
                    )}
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        OpenAI API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="OpenAI API Key"
                        defaultValue={config.openaiApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            openaiApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Ollama API URL
                      </p>
                      <Input
                        type="text"
                        placeholder="Ollama API URL"
                        defaultValue={config.ollamaApiUrl}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            ollamaApiUrl: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        GROQ API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="GROQ API Key"
                        defaultValue={config.groqApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            groqApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Anthropic API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="Anthropic API key"
                        defaultValue={config.anthropicApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            anthropicApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Gemini API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="Gemini API key"
                        defaultValue={config.geminiApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            geminiApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                  </div>
                )}
                {isLoading && (
                  <div className="w-full flex items-center justify-center mt-6 text-black/70 dark:text-white/70 py-6">
                    <RefreshCcw className="animate-spin" />
                  </div>
                )}
                <div className="w-full mt-6 space-y-2">
                  <p className="text-xs text-black/50 dark:text-white/50">
                    We'll refresh the page after updating the settings.
                  </p>
                  <button
                    onClick={handleSubmit}
                    className="bg-[#24A0ED] flex flex-row items-center space-x-2 text-white disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#ececec21] rounded-full px-4 py-2"
                    disabled={isLoading || isUpdating}
                  >
                    {isUpdating ? (
                      <RefreshCw size={20} className="animate-spin" />
                    ) : (
                      <CloudUpload size={20} />
                    )}
                  </button>
                </div>
              </DialogPanel>
            </TransitionChild>
          </div>
        </div>
      </Dialog>
    </Transition>
  );
};

export default SettingsDialog;
@ -6,7 +6,6 @@ import Link from 'next/link';
import { useSelectedLayoutSegments } from 'next/navigation';
import React, { useState, type ReactNode } from 'react';
import Layout from './Layout';
import SettingsDialog from './SettingsDialog';

const VerticalIconContainer = ({ children }: { children: ReactNode }) => {
  return (
@ -67,15 +66,9 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
          ))}
        </VerticalIconContainer>

        <Settings
          onClick={() => setIsSettingsOpen(!isSettingsOpen)}
          className="cursor-pointer"
        />

        <SettingsDialog
          isOpen={isSettingsOpen}
          setIsOpen={setIsSettingsOpen}
        />
        <Link href="/settings">
          <Settings className="cursor-pointer" />
        </Link>
      </div>
    </div>
@ -1,8 +1,7 @@
'use client';
import { useTheme } from 'next-themes';
import { SunIcon, MoonIcon, MonitorIcon } from 'lucide-react';
import { useCallback, useEffect, useState } from 'react';
import { Select } from '../SettingsDialog';
import Select from '../ui/Select';

type Theme = 'dark' | 'light' | 'system';
28
ui/components/ui/Select.tsx
Normal file
@ -0,0 +1,28 @@
import { cn } from '@/lib/utils';
import { SelectHTMLAttributes } from 'react';

interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
  options: { value: string; label: string; disabled?: boolean }[];
}

export const Select = ({ className, options, ...restProps }: SelectProps) => {
  return (
    <select
      {...restProps}
      className={cn(
        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
        className,
      )}
    >
      {options.map(({ label, value, disabled }) => {
        return (
          <option key={value} value={value} disabled={disabled}>
            {label}
          </option>
        );
      })}
    </select>
  );
};

export default Select;
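A usage sketch for the extracted Select component; the import path and option values below are illustrative (the diff only shows the relative import `'../ui/Select'` used by the theme switcher).

```ts
// Illustrative usage of the new shared Select component.
import Select from '@/components/ui/Select'; // path assumed from the project's alias style

const ThemeExample = ({
  theme,
  onChange,
}: {
  theme: string;
  onChange: (t: string) => void;
}) => (
  <Select
    value={theme}
    onChange={(e) => onChange(e.target.value)}
    options={[
      { value: 'light', label: 'Light' },
      { value: 'dark', label: 'Dark' },
      { value: 'system', label: 'System' },
    ]}
  />
);

export default ThemeExample;
```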
@ -1,6 +1,6 @@
{
  "name": "perplexica-frontend",
  "version": "1.10.0-rc2",
  "version": "1.10.0-rc3",
  "license": "MIT",
  "author": "ItzCrazyKns",
  "scripts": {