Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-26 19:58:41 +00:00)

Compare commits: develop/v1 ... 6b72114010 (11 commits)

6b72114010
5cf0598985
b38413e92d
0c908618c9
42fa951fd6
ece1a60f4d
802627afcd
f12c39f41e
65692f1d52
c4f818f602
6edac6938c
.gitignore (vendored): 4 changed lines

@@ -2,7 +2,6 @@
 node_modules/
 npm-debug.log
 yarn-error.log
-package-lock.json

 # Build output
 /.next/
@@ -38,6 +37,3 @@ Thumbs.db
 # Db
 db.sqlite
 /searxng
-
-# Dev
-docker-compose-dev.yaml
README.md

@@ -2,6 +2,7 @@

 [](https://discord.gg/26aArMy8tT)




 ## Table of Contents <!-- omit in toc -->
@@ -43,7 +44,7 @@ Want to know more about its architecture and how it works? You can read it [here
 - **Normal Mode:** Processes your query and performs a web search.
 - **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
   - **All Mode:** Searches the entire web to find the best results.
-  - **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
+  - **Writing Assistant Mode:** Helpful for writing tasks that does not require searching the web.
   - **Academic Search Mode:** Finds articles and papers, ideal for academic research.
   - **YouTube Search Mode:** Finds YouTube videos based on the search query.
   - **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.
@@ -142,7 +143,6 @@ You can access Perplexica over your home network by following our networking gui

 ## One-Click Deployment

-[](https://usw.sealos.io/?openapp=system-template%3FtemplateName%3Dperplexica)
 [](https://repocloud.io/details/?app_id=267)

 ## Upcoming Features
docker-compose.yaml

@@ -4,7 +4,7 @@ services:
     volumes:
       - ./searxng:/etc/searxng:rw
     ports:
-      - '4000:8080'
+      - 4000:8080
     networks:
       - perplexica-network
     restart: unless-stopped
@@ -19,7 +19,7 @@ services:
     depends_on:
       - searxng
     ports:
-      - '3001:3001'
+      - 3001:3001
     volumes:
       - backend-dbstore:/home/perplexica/data
       - uploads:/home/perplexica/uploads
@@ -37,11 +37,12 @@ services:
       args:
         - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
        - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
+      network: host
     image: itzcrazykns1337/perplexica-frontend:main
     depends_on:
       - perplexica-backend
     ports:
-      - '3000:3000'
+      - 3000:3000
     networks:
       - perplexica-network
     restart: unless-stopped
docs/installation/UPDATING.md

@@ -7,43 +7,34 @@ To update Perplexica to the latest version, follow these steps:
 1. Clone the latest version of Perplexica from GitHub:

 ```bash
 git clone https://github.com/ItzCrazyKns/Perplexica.git
 ```

-2. Navigate to the project directory.
+2. Navigate to the Project Directory.

-3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+3. Pull latest images from registry.

-4. Pull the latest images from the registry.

 ```bash
 docker compose pull
 ```

-5. Update and recreate the containers.
+4. Update and Recreate containers.

 ```bash
 docker compose up -d
 ```

-6. Once the command completes, go to http://localhost:3000 and verify the latest changes.
+5. Once the command completes running go to http://localhost:3000 and verify the latest changes.

-## For non-Docker users
+## For non Docker users

 1. Clone the latest version of Perplexica from GitHub:

 ```bash
 git clone https://github.com/ItzCrazyKns/Perplexica.git
 ```

-2. Navigate to the project directory.
+2. Navigate to the Project Directory
+3. Execute `npm i` in both the `ui` folder and the root directory.
-3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+4. Once packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
+5. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
-4. Execute `npm i` in both the `ui` folder and the root directory.

-5. Once the packages are updated, execute `npm run build` in both the `ui` folder and the root directory.

-6. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.

----
package-lock.json (generated): 6912 changed lines
File diff suppressed because it is too large.
package.json

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.10.0-rc3",
+  "version": "1.10.0-rc2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -30,8 +30,8 @@
     "@iarna/toml": "^2.2.5",
     "@langchain/anthropic": "^0.2.3",
     "@langchain/community": "^0.2.16",
-    "@langchain/google-genai": "^0.0.23",
     "@langchain/openai": "^0.0.25",
+    "@langchain/google-genai": "^0.0.23",
     "@xenova/transformers": "^2.17.1",
     "axios": "^1.6.8",
     "better-sqlite3": "^11.0.0",
sample.config.toml

@@ -3,12 +3,6 @@ PORT = 3001 # Port to run the server on
 SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

-[SEARCH_ENGINE_BACKENDS] # "google" | "searxng" | "bing" | "brave" | "yacy"
-SEARCH = "searxng"
-IMAGE = "searxng"
-VIDEO = "searxng"
-NEWS = "searxng"

 [MODELS.OPENAI]
 API_KEY = ""
@@ -21,25 +15,16 @@ API_KEY = ""
 [MODELS.GEMINI]
 API_KEY = ""

-[MODELS.CUSTOM_OPENAI]
-API_KEY = ""
-API_URL = ""

 [MODELS.OLLAMA]
 API_URL = "" # Ollama API URL - http://host.docker.internal:11434

-[SEARCH_ENGINES.GOOGLE]
+[MODELS.LMSTUDIO]
+API_URL = "" # LM STUDIO API URL - http://host.docker.internal:1234/v1

+[MODELS.CUSTOM_OPENAI]
 API_KEY = ""
-CSE_ID = ""
+API_URL = ""
+MODEL_NAME = ""

-[SEARCH_ENGINES.SEARXNG]
+[API_ENDPOINTS]
-ENDPOINT = ""
+SEARXNG = "http://localhost:32768" # SearxNG API URL

-[SEARCH_ENGINES.BING]
-SUBSCRIPTION_KEY = ""

-[SEARCH_ENGINES.BRAVE]
-API_KEY = ""

-[SEARCH_ENGINES.YACY]
-ENDPOINT = ""
searxng/settings.yml

@@ -15,5 +15,3 @@ server:
 engines:
   - name: wolframalpha
     disabled: false
-  - name: qwant
-    disabled: true
@ -7,12 +7,7 @@ import { PromptTemplate } from '@langchain/core/prompts';
|
|||||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||||
import { BaseMessage } from '@langchain/core/messages';
|
import { BaseMessage } from '@langchain/core/messages';
|
||||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
import { searchSearxng } from '../lib/searxng';
|
||||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
|
||||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
|
||||||
import { searchYaCy } from '../lib/searchEngines/yacy';
|
|
||||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
|
||||||
import { getImageSearchEngineBackend } from '../config';
|
|
||||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||||
|
|
||||||
const imageSearchChainPrompt = `
|
const imageSearchChainPrompt = `
|
||||||
@ -41,103 +36,6 @@ type ImageSearchChainInput = {
|
|||||||
query: string;
|
query: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
async function performImageSearch(query: string) {
|
|
||||||
const searchEngine = getImageSearchEngineBackend();
|
|
||||||
let images = [];
|
|
||||||
|
|
||||||
switch (searchEngine) {
|
|
||||||
case 'google': {
|
|
||||||
const googleResult = await searchGooglePSE(query);
|
|
||||||
images = googleResult.results
|
|
||||||
.map((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
return {
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
source: result.displayLink,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter(Boolean);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'searxng': {
|
|
||||||
const searxResult = await searchSearxng(query, {
|
|
||||||
engines: ['google images', 'bing images'],
|
|
||||||
pageno: 1,
|
|
||||||
});
|
|
||||||
searxResult.results.forEach((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
images.push({
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'brave': {
|
|
||||||
const braveResult = await searchBraveAPI(query);
|
|
||||||
images = braveResult.results
|
|
||||||
.map((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
return {
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
source: result.url,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter(Boolean);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'yacy': {
|
|
||||||
const yacyResult = await searchYaCy(query);
|
|
||||||
images = yacyResult.results
|
|
||||||
.map((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
return {
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
source: result.url,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter(Boolean);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'bing': {
|
|
||||||
const bingResult = await searchBingAPI(query);
|
|
||||||
images = bingResult.results
|
|
||||||
.map((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
return {
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
source: result.url,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter(Boolean);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return images;
|
|
||||||
}
|
|
||||||
|
|
||||||
const strParser = new StringOutputParser();
|
const strParser = new StringOutputParser();
|
||||||
|
|
||||||
const createImageSearchChain = (llm: BaseChatModel) => {
|
const createImageSearchChain = (llm: BaseChatModel) => {
|
||||||
@ -154,7 +52,22 @@ const createImageSearchChain = (llm: BaseChatModel) => {
|
|||||||
llm,
|
llm,
|
||||||
strParser,
|
strParser,
|
||||||
RunnableLambda.from(async (input: string) => {
|
RunnableLambda.from(async (input: string) => {
|
||||||
const images = await performImageSearch(input);
|
const res = await searchSearxng(input, {
|
||||||
|
engines: ['bing images', 'google images'],
|
||||||
|
});
|
||||||
|
|
||||||
|
const images = [];
|
||||||
|
|
||||||
|
res.results.forEach((result) => {
|
||||||
|
if (result.img_src && result.url && result.title) {
|
||||||
|
images.push({
|
||||||
|
img_src: result.img_src,
|
||||||
|
url: result.url,
|
||||||
|
title: result.title,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return images.slice(0, 10);
|
return images.slice(0, 10);
|
||||||
}),
|
}),
|
||||||
]);
|
]);
|
||||||
|
@ -7,30 +7,26 @@ import { PromptTemplate } from '@langchain/core/prompts';
|
|||||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||||
import { BaseMessage } from '@langchain/core/messages';
|
import { BaseMessage } from '@langchain/core/messages';
|
||||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
import { searchSearxng } from '../lib/searxng';
|
||||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
|
||||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
|
||||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
|
||||||
import { getVideoSearchEngineBackend } from '../config';
|
|
||||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||||
|
|
||||||
const VideoSearchChainPrompt = `
|
const VideoSearchChainPrompt = `
|
||||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
|
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
|
||||||
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
|
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
1. Follow up question: How does a car work?
|
1. Follow up question: How does a car work?
|
||||||
Rephrased: How does a car work?
|
Rephrased: How does a car work?
|
||||||
|
|
||||||
2. Follow up question: What is the theory of relativity?
|
2. Follow up question: What is the theory of relativity?
|
||||||
Rephrased: What is theory of relativity
|
Rephrased: What is theory of relativity
|
||||||
|
|
||||||
3. Follow up question: How does an AC work?
|
3. Follow up question: How does an AC work?
|
||||||
Rephrased: How does an AC work
|
Rephrased: How does an AC work
|
||||||
|
|
||||||
Conversation:
|
Conversation:
|
||||||
{chat_history}
|
{chat_history}
|
||||||
|
|
||||||
Follow up question: {query}
|
Follow up question: {query}
|
||||||
Rephrased question:
|
Rephrased question:
|
||||||
`;
|
`;
|
||||||
@ -42,102 +38,6 @@ type VideoSearchChainInput = {
|
|||||||
|
|
||||||
const strParser = new StringOutputParser();
|
const strParser = new StringOutputParser();
|
||||||
|
|
||||||
async function performVideoSearch(query: string) {
|
|
||||||
const searchEngine = getVideoSearchEngineBackend();
|
|
||||||
const youtubeQuery = `${query} site:youtube.com`;
|
|
||||||
let videos = [];
|
|
||||||
|
|
||||||
switch (searchEngine) {
|
|
||||||
case 'google': {
|
|
||||||
const googleResult = await searchGooglePSE(youtubeQuery);
|
|
||||||
googleResult.results.forEach((result) => {
|
|
||||||
// Use .results instead of .originalres
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
const videoId = new URL(result.url).searchParams.get('v');
|
|
||||||
videos.push({
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
iframe_src: videoId
|
|
||||||
? `https://www.youtube.com/embed/${videoId}`
|
|
||||||
: null,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'searxng': {
|
|
||||||
const searxResult = await searchSearxng(query, {
|
|
||||||
engines: ['youtube'],
|
|
||||||
});
|
|
||||||
searxResult.results.forEach((result) => {
|
|
||||||
if (
|
|
||||||
result.thumbnail &&
|
|
||||||
result.url &&
|
|
||||||
result.title &&
|
|
||||||
result.iframe_src
|
|
||||||
) {
|
|
||||||
videos.push({
|
|
||||||
img_src: result.thumbnail,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
iframe_src: result.iframe_src,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'brave': {
|
|
||||||
const braveResult = await searchBraveAPI(youtubeQuery);
|
|
||||||
braveResult.results.forEach((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
const videoId = new URL(result.url).searchParams.get('v');
|
|
||||||
videos.push({
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
iframe_src: videoId
|
|
||||||
? `https://www.youtube.com/embed/${videoId}`
|
|
||||||
: null,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'yacy': {
|
|
||||||
console.log('Not available for yacy');
|
|
||||||
videos = [];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'bing': {
|
|
||||||
const bingResult = await searchBingAPI(youtubeQuery);
|
|
||||||
bingResult.results.forEach((result) => {
|
|
||||||
if (result.img_src && result.url && result.title) {
|
|
||||||
const videoId = new URL(result.url).searchParams.get('v');
|
|
||||||
videos.push({
|
|
||||||
img_src: result.img_src,
|
|
||||||
url: result.url,
|
|
||||||
title: result.title,
|
|
||||||
iframe_src: videoId
|
|
||||||
? `https://www.youtube.com/embed/${videoId}`
|
|
||||||
: null,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return videos;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createVideoSearchChain = (llm: BaseChatModel) => {
|
const createVideoSearchChain = (llm: BaseChatModel) => {
|
||||||
return RunnableSequence.from([
|
return RunnableSequence.from([
|
||||||
RunnableMap.from({
|
RunnableMap.from({
|
||||||
@ -152,7 +52,28 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
|
|||||||
llm,
|
llm,
|
||||||
strParser,
|
strParser,
|
||||||
RunnableLambda.from(async (input: string) => {
|
RunnableLambda.from(async (input: string) => {
|
||||||
const videos = await performVideoSearch(input);
|
const res = await searchSearxng(input, {
|
||||||
|
engines: ['youtube'],
|
||||||
|
});
|
||||||
|
|
||||||
|
const videos = [];
|
||||||
|
|
||||||
|
res.results.forEach((result) => {
|
||||||
|
if (
|
||||||
|
result.thumbnail &&
|
||||||
|
result.url &&
|
||||||
|
result.title &&
|
||||||
|
result.iframe_src
|
||||||
|
) {
|
||||||
|
videos.push({
|
||||||
|
img_src: result.thumbnail,
|
||||||
|
url: result.url,
|
||||||
|
title: result.title,
|
||||||
|
iframe_src: result.iframe_src,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
return videos.slice(0, 10);
|
return videos.slice(0, 10);
|
||||||
}),
|
}),
|
||||||
]);
|
]);
|
||||||
|
src/config.ts

@@ -10,12 +10,6 @@ interface Config {
     SIMILARITY_MEASURE: string;
     KEEP_ALIVE: string;
   };
-  SEARCH_ENGINE_BACKENDS: {
-    SEARCH: string;
-    IMAGE: string;
-    VIDEO: string;
-    NEWS: string;
-  };
   MODELS: {
     OPENAI: {
       API_KEY: string;
@@ -32,29 +26,17 @@ interface Config {
     OLLAMA: {
       API_URL: string;
     };
+    LMSTUDIO: {
+      API_URL: string;
+    };
     CUSTOM_OPENAI: {
       API_URL: string;
       API_KEY: string;
       MODEL_NAME: string;
     };
   };
-  SEARCH_ENGINES: {
+  API_ENDPOINTS: {
-    GOOGLE: {
+    SEARXNG: string;
-      API_KEY: string;
-      CSE_ID: string;
-    };
-    SEARXNG: {
-      ENDPOINT: string;
-    };
-    BING: {
-      SUBSCRIPTION_KEY: string;
-    };
-    BRAVE: {
-      API_KEY: string;
-    };
-    YACY: {
-      ENDPOINT: string;
-    };
   };
 }

@@ -82,35 +64,13 @@ export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;

 export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;

-export const getSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.SEARCH;
-
-export const getImageSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.IMAGE || getSearchEngineBackend();
-
-export const getVideoSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.VIDEO || getSearchEngineBackend();
-
-export const getNewsSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.NEWS || getSearchEngineBackend();
-
-export const getGoogleApiKey = () => loadConfig().SEARCH_ENGINES.GOOGLE.API_KEY;
-
-export const getGoogleCseId = () => loadConfig().SEARCH_ENGINES.GOOGLE.CSE_ID;
-
-export const getBraveApiKey = () => loadConfig().SEARCH_ENGINES.BRAVE.API_KEY;
-
-export const getBingSubscriptionKey = () =>
-  loadConfig().SEARCH_ENGINES.BING.SUBSCRIPTION_KEY;
-
-export const getYacyJsonEndpoint = () =>
-  loadConfig().SEARCH_ENGINES.YACY.ENDPOINT;
-
 export const getSearxngApiEndpoint = () =>
-  process.env.SEARXNG_API_URL || loadConfig().SEARCH_ENGINES.SEARXNG.ENDPOINT;
+  process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;

 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

+export const getLMStudioApiEndpoint = () => loadConfig().MODELS.LMSTUDIO.API_URL;

 export const getCustomOpenaiApiKey = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

@@ -121,10 +81,6 @@ export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;

 const mergeConfigs = (current: any, update: any): any => {
-  if (update === null || update === undefined) {
-    return current;
-  }
-
   if (typeof current !== 'object' || current === null) {
     return update;
   }
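The `mergeConfigs` hunk only shows the head of the function, so the effect of dropping the `null`/`undefined` guard is easy to miss. Below is a minimal, self-contained sketch of how a deep merge along these lines behaves with the guard in place; the recursive body and the example values are assumptions for illustration, not the project's exact code.

// Sketch only: a simplified deep merge with the guard that appears on the
// removed ('-') side of the diff above. The recursion over keys is assumed.
const mergeConfigs = (current: any, update: any): any => {
  // With this guard, a missing or undefined section in the update cannot
  // overwrite or break the currently stored value.
  if (update === null || update === undefined) {
    return current;
  }

  if (typeof current !== 'object' || current === null) {
    return update;
  }

  const result: Record<string, any> = { ...current };
  for (const key of Object.keys(update)) {
    result[key] = mergeConfigs(current[key], update[key]);
  }
  return result;
};

// Hypothetical example: a config update that omits the OLLAMA section
// leaves the stored endpoint untouched instead of erasing it.
const stored = { MODELS: { OLLAMA: { API_URL: 'http://localhost:11434' } } };
const patch = { MODELS: { OLLAMA: undefined } };
console.log(mergeConfigs(stored, patch).MODELS.OLLAMA.API_URL);
// -> 'http://localhost:11434'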
src/lib/chat/service.ts (new file, 1 line)

@@ -0,0 +1 @@
src/lib/providers/index.ts

@@ -4,6 +4,7 @@ import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadTransformersEmbeddingsModels } from './transformers';
 import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
+import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels } from './lmstudio';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
@@ -17,6 +18,7 @@ const chatModelProviders = {
   ollama: loadOllamaChatModels,
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
+  lm_studio: loadLMStudioChatModels,
 };

 const embeddingModelProviders = {
@@ -24,6 +26,7 @@ const embeddingModelProviders = {
   local: loadTransformersEmbeddingsModels,
   ollama: loadOllamaEmbeddingsModels,
   gemini: loadGeminiEmbeddingsModels,
+  lm_studio: loadLMStudioEmbeddingsModels,
 };

 export const getAvailableChatModelProviders = async () => {
@@ -36,27 +39,7 @@ export const getAvailableChatModelProviders = async () => {
     }
   }

-  const customOpenAiApiKey = getCustomOpenaiApiKey();
-  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
-  const customOpenAiModelName = getCustomOpenaiModelName();
-
-  models['custom_openai'] = {
-    ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
-      ? {
-          [customOpenAiModelName]: {
-            displayName: customOpenAiModelName,
-            model: new ChatOpenAI({
-              openAIApiKey: customOpenAiApiKey,
-              modelName: customOpenAiModelName,
-              temperature: 0.7,
-              configuration: {
-                baseURL: customOpenAiApiUrl,
-              },
-            }),
-          },
-        }
-      : {}),
-  };
+  models['custom_openai'] = {};

   return models;
 };
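For orientation, here is a hedged sketch of how a caller can consume the provider registry shown above. `getAvailableChatModelProviders` and the `lm_studio` key come from the diff; the import path, the model id, and the selection logic are illustrative assumptions.

// Hypothetical consumer of the chat model provider registry.
import { getAvailableChatModelProviders } from './lib/providers';

const pickChatModel = async (provider: string, modelName: string) => {
  const providers = await getAvailableChatModelProviders();
  const models = providers[provider] ?? {};
  const entry = models[modelName];
  if (!entry) {
    throw new Error(`Model "${modelName}" not available for provider "${provider}"`);
  }
  return entry.model; // a LangChain chat model (e.g. ChatOpenAI)
};

// Example (assumes an LM Studio server is running and exposes this model id):
// const llm = await pickChatModel('lm_studio', 'qwen2.5-7b-instruct');
// const reply = await llm.invoke('Hello!');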
src/lib/providers/lmstudio.ts (new file, 89 lines)

@@ -0,0 +1,89 @@
+import { OpenAIEmbeddings } from '@langchain/openai';
+import { ChatOpenAI } from '@langchain/openai';
+import { getKeepAlive, getLMStudioApiEndpoint } from '../../config';
+import logger from '../../utils/logger';
+import axios from 'axios';
+
+interface LMStudioModel {
+  id: string;
+  // add other properties if LM Studio API provides them
+}
+
+interface ChatModelConfig {
+  displayName: string;
+  model: ChatOpenAI;
+}
+
+export const loadLMStudioChatModels = async (): Promise<Record<string, ChatModelConfig>> => {
+  const lmStudioEndpoint = getLMStudioApiEndpoint();
+
+  if (!lmStudioEndpoint) {
+    logger.debug('LM Studio endpoint not configured, skipping');
+    return {};
+  }
+
+  try {
+    const response = await axios.get<{ data: LMStudioModel[] }>(`${lmStudioEndpoint}/models`, {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+    });
+
+    const lmStudioModels = response.data.data;
+
+    const chatModels = lmStudioModels.reduce<Record<string, ChatModelConfig>>((acc, model) => {
+      acc[model.id] = {
+        displayName: model.id,
+        model: new ChatOpenAI({
+          openAIApiKey: 'lm-studio',
+          configuration: {
+            baseURL: lmStudioEndpoint,
+          },
+          modelName: model.id,
+          temperature: 0.7,
+        }),
+      };
+      return acc;
+    }, {});
+
+    return chatModels;
+  } catch (err) {
+    logger.error(`Error loading LM Studio models: ${err}`);
+    return {};
+  }
+};
+
+export const loadLMStudioEmbeddingsModels = async () => {
+  const lmStudioEndpoint = getLMStudioApiEndpoint();
+
+  if (!lmStudioEndpoint) return {};
+
+  try {
+    const response = await axios.get(`${lmStudioEndpoint}/models`, {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+    });
+
+    const lmStudioModels = response.data.data;
+
+    const embeddingsModels = lmStudioModels.reduce((acc, model) => {
+      acc[model.id] = {
+        displayName: model.id,
+        model: new OpenAIEmbeddings({
+          openAIApiKey: 'lm-studio', // Dummy key required by LangChain
+          configuration: {
+            baseURL: lmStudioEndpoint,
+          },
+          modelName: model.id,
+        }),
+      };
+      return acc;
+    }, {});
+
+    return embeddingsModels;
+  } catch (err) {
+    logger.error(`Error loading LM Studio embeddings model: ${err}`);
+    return {};
+  }
+};
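A short usage sketch for the new loader, assuming `MODELS.LMSTUDIO.API_URL` points at an OpenAI-compatible LM Studio server (the sample config comment suggests http://host.docker.internal:1234/v1). Only the loader's name and return shape come from the file above; the import path and the invocation are assumptions.

// Hedged sketch: list the models LM Studio reports and invoke the first one.
import { loadLMStudioChatModels } from './lib/providers/lmstudio';

const main = async () => {
  const models = await loadLMStudioChatModels(); // {} when no endpoint is configured
  for (const [id, entry] of Object.entries(models)) {
    console.log(`${id} -> ${entry.displayName}`);
  }

  const first = Object.values(models)[0];
  if (first) {
    const reply = await first.model.invoke('Say hello in one short sentence.');
    console.log(reply.content);
  }
};

main().catch(console.error);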
@ -1,105 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getBingSubscriptionKey } from '../../config';
|
|
||||||
|
|
||||||
interface BingAPISearchResult {
|
|
||||||
_type: string;
|
|
||||||
name: string;
|
|
||||||
url: string;
|
|
||||||
displayUrl: string;
|
|
||||||
snippet?: string;
|
|
||||||
dateLastCrawled?: string;
|
|
||||||
thumbnailUrl?: string;
|
|
||||||
contentUrl?: string;
|
|
||||||
hostPageUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
accentColor?: string;
|
|
||||||
contentSize?: string;
|
|
||||||
datePublished?: string;
|
|
||||||
encodingFormat?: string;
|
|
||||||
hostPageDisplayUrl?: string;
|
|
||||||
id?: string;
|
|
||||||
isLicensed?: boolean;
|
|
||||||
isFamilyFriendly?: boolean;
|
|
||||||
language?: string;
|
|
||||||
mediaUrl?: string;
|
|
||||||
motionThumbnailUrl?: string;
|
|
||||||
publisher?: string;
|
|
||||||
viewCount?: number;
|
|
||||||
webSearchUrl?: string;
|
|
||||||
primaryImageOfPage?: {
|
|
||||||
thumbnailUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
};
|
|
||||||
video?: {
|
|
||||||
allowHttpsEmbed?: boolean;
|
|
||||||
embedHtml?: string;
|
|
||||||
allowMobileEmbed?: boolean;
|
|
||||||
viewCount?: number;
|
|
||||||
duration?: string;
|
|
||||||
};
|
|
||||||
image?: {
|
|
||||||
thumbnail?: {
|
|
||||||
contentUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
};
|
|
||||||
imageInsightsToken?: string;
|
|
||||||
imageId?: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchBingAPI = async (query: string) => {
|
|
||||||
try {
|
|
||||||
const bingApiKey = await getBingSubscriptionKey();
|
|
||||||
const url = new URL(`https://api.cognitive.microsoft.com/bing/v7.0/search`);
|
|
||||||
url.searchParams.append('q', query);
|
|
||||||
url.searchParams.append('responseFilter', 'Webpages,Images,Videos');
|
|
||||||
|
|
||||||
const res = await axios.get(url.toString(), {
|
|
||||||
headers: {
|
|
||||||
'Ocp-Apim-Subscription-Key': bingApiKey,
|
|
||||||
Accept: 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (res.data.error) {
|
|
||||||
throw new Error(`Bing API Error: ${res.data.error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const originalres = res.data;
|
|
||||||
|
|
||||||
// Extract web, image, and video results
|
|
||||||
const webResults = originalres.webPages?.value || [];
|
|
||||||
const imageResults = originalres.images?.value || [];
|
|
||||||
const videoResults = originalres.videos?.value || [];
|
|
||||||
|
|
||||||
const results = webResults.map((item: BingAPISearchResult) => ({
|
|
||||||
title: item.name,
|
|
||||||
url: item.url,
|
|
||||||
content: item.snippet,
|
|
||||||
img_src:
|
|
||||||
item.primaryImageOfPage?.thumbnailUrl ||
|
|
||||||
imageResults.find((img: any) => img.hostPageUrl === item.url)
|
|
||||||
?.thumbnailUrl ||
|
|
||||||
videoResults.find((vid: any) => vid.hostPageUrl === item.url)
|
|
||||||
?.thumbnailUrl,
|
|
||||||
...(item.video && {
|
|
||||||
videoData: {
|
|
||||||
duration: item.video.duration,
|
|
||||||
embedUrl: item.video.embedHtml?.match(/src="(.*?)"/)?.[1],
|
|
||||||
},
|
|
||||||
publisher: item.publisher,
|
|
||||||
datePublished: item.datePublished,
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { results, originalres };
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error.response?.data
|
|
||||||
? JSON.stringify(error.response.data, null, 2)
|
|
||||||
: error.message || 'Unknown error';
|
|
||||||
throw new Error(`Bing API Error: ${errorMessage}`);
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,102 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getBraveApiKey } from '../../config';
|
|
||||||
|
|
||||||
interface BraveSearchResult {
|
|
||||||
title: string;
|
|
||||||
url: string;
|
|
||||||
content?: string;
|
|
||||||
img_src?: string;
|
|
||||||
age?: string;
|
|
||||||
family_friendly?: boolean;
|
|
||||||
language?: string;
|
|
||||||
video?: {
|
|
||||||
embedUrl?: string;
|
|
||||||
duration?: string;
|
|
||||||
};
|
|
||||||
rating?: {
|
|
||||||
value: number;
|
|
||||||
scale: number;
|
|
||||||
};
|
|
||||||
products?: Array<{
|
|
||||||
name: string;
|
|
||||||
price?: string;
|
|
||||||
}>;
|
|
||||||
recipe?: {
|
|
||||||
ingredients?: string[];
|
|
||||||
cookTime?: string;
|
|
||||||
};
|
|
||||||
meta?: {
|
|
||||||
fetched?: string;
|
|
||||||
lastCrawled?: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchBraveAPI = async (
|
|
||||||
query: string,
|
|
||||||
numResults: number = 20,
|
|
||||||
): Promise<{ results: BraveSearchResult[]; originalres: any }> => {
|
|
||||||
try {
|
|
||||||
const braveApiKey = await getBraveApiKey();
|
|
||||||
const url = new URL(`https://api.search.brave.com/res/v1/web/search`);
|
|
||||||
|
|
||||||
url.searchParams.append('q', query);
|
|
||||||
url.searchParams.append('count', numResults.toString());
|
|
||||||
|
|
||||||
const res = await axios.get(url.toString(), {
|
|
||||||
headers: {
|
|
||||||
'X-Subscription-Token': braveApiKey,
|
|
||||||
Accept: 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (res.data.error) {
|
|
||||||
throw new Error(`Brave API Error: ${res.data.error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const originalres = res.data;
|
|
||||||
const webResults = originalres.web?.results || [];
|
|
||||||
|
|
||||||
const results: BraveSearchResult[] = webResults.map((item: any) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.url,
|
|
||||||
content: item.description,
|
|
||||||
img_src: item.thumbnail?.src || item.deep_results?.images?.[0]?.src,
|
|
||||||
age: item.age,
|
|
||||||
family_friendly: item.family_friendly,
|
|
||||||
language: item.language,
|
|
||||||
video: item.video
|
|
||||||
? {
|
|
||||||
embedUrl: item.video.embed_url,
|
|
||||||
duration: item.video.duration,
|
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
rating: item.rating
|
|
||||||
? {
|
|
||||||
value: item.rating.value,
|
|
||||||
scale: item.rating.scale_max,
|
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
products: item.deep_results?.product_cluster?.map((p: any) => ({
|
|
||||||
name: p.name,
|
|
||||||
price: p.price,
|
|
||||||
})),
|
|
||||||
recipe: item.recipe
|
|
||||||
? {
|
|
||||||
ingredients: item.recipe.ingredients,
|
|
||||||
cookTime: item.recipe.cook_time,
|
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
meta: {
|
|
||||||
fetched: item.meta?.fetched,
|
|
||||||
lastCrawled: item.meta?.last_crawled,
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { results, originalres };
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error.response?.data
|
|
||||||
? JSON.stringify(error.response.data, null, 2)
|
|
||||||
: error.message || 'Unknown error';
|
|
||||||
throw new Error(`Brave API Error: ${errorMessage}`);
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,85 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getGoogleApiKey, getGoogleCseId } from '../../config';
|
|
||||||
|
|
||||||
interface GooglePSESearchResult {
|
|
||||||
kind: string;
|
|
||||||
title: string;
|
|
||||||
htmlTitle: string;
|
|
||||||
link: string;
|
|
||||||
displayLink: string;
|
|
||||||
snippet?: string;
|
|
||||||
htmlSnippet?: string;
|
|
||||||
cacheId?: string;
|
|
||||||
formattedUrl: string;
|
|
||||||
htmlFormattedUrl: string;
|
|
||||||
pagemap?: {
|
|
||||||
videoobject: any;
|
|
||||||
cse_thumbnail?: Array<{
|
|
||||||
src: string;
|
|
||||||
width: string;
|
|
||||||
height: string;
|
|
||||||
}>;
|
|
||||||
metatags?: Array<{
|
|
||||||
[key: string]: string;
|
|
||||||
author?: string;
|
|
||||||
}>;
|
|
||||||
cse_image?: Array<{
|
|
||||||
src: string;
|
|
||||||
}>;
|
|
||||||
};
|
|
||||||
fileFormat?: string;
|
|
||||||
image?: {
|
|
||||||
contextLink: string;
|
|
||||||
thumbnailLink: string;
|
|
||||||
};
|
|
||||||
mime?: string;
|
|
||||||
labels?: Array<{
|
|
||||||
name: string;
|
|
||||||
displayName: string;
|
|
||||||
}>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchGooglePSE = async (query: string) => {
|
|
||||||
try {
|
|
||||||
const [googleApiKey, googleCseID] = await Promise.all([
|
|
||||||
getGoogleApiKey(),
|
|
||||||
getGoogleCseId(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const url = new URL(`https://www.googleapis.com/customsearch/v1`);
|
|
||||||
url.searchParams.append('q', query);
|
|
||||||
url.searchParams.append('cx', googleCseID);
|
|
||||||
url.searchParams.append('key', googleApiKey);
|
|
||||||
|
|
||||||
const res = await axios.get(url.toString());
|
|
||||||
|
|
||||||
if (res.data.error) {
|
|
||||||
throw new Error(`Google PSE Error: ${res.data.error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const originalres = res.data.items;
|
|
||||||
|
|
||||||
const results = originalres.map((item: GooglePSESearchResult) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.link,
|
|
||||||
content: item.snippet,
|
|
||||||
img_src:
|
|
||||||
item.pagemap?.cse_image?.[0]?.src ||
|
|
||||||
item.pagemap?.cse_thumbnail?.[0]?.src ||
|
|
||||||
item.image?.thumbnailLink,
|
|
||||||
...(item.pagemap?.videoobject?.[0] && {
|
|
||||||
videoData: {
|
|
||||||
duration: item.pagemap.videoobject[0].duration,
|
|
||||||
embedUrl: item.pagemap.videoobject[0].embedurl,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { results, originalres };
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error.response?.data
|
|
||||||
? JSON.stringify(error.response.data, null, 2)
|
|
||||||
: error.message || 'Unknown error';
|
|
||||||
throw new Error(`Google PSE Error: ${errorMessage}`);
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,79 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getYacyJsonEndpoint } from '../../config';
|
|
||||||
|
|
||||||
interface YaCySearchResult {
|
|
||||||
channels: {
|
|
||||||
title: string;
|
|
||||||
description: string;
|
|
||||||
link: string;
|
|
||||||
image: {
|
|
||||||
url: string;
|
|
||||||
title: string;
|
|
||||||
link: string;
|
|
||||||
};
|
|
||||||
startIndex: string;
|
|
||||||
itemsPerPage: string;
|
|
||||||
searchTerms: string;
|
|
||||||
items: {
|
|
||||||
title: string;
|
|
||||||
link: string;
|
|
||||||
code: string;
|
|
||||||
description: string;
|
|
||||||
pubDate: string;
|
|
||||||
image?: string;
|
|
||||||
size: string;
|
|
||||||
sizename: string;
|
|
||||||
guid: string;
|
|
||||||
faviconUrl: string;
|
|
||||||
host: string;
|
|
||||||
path: string;
|
|
||||||
file: string;
|
|
||||||
urlhash: string;
|
|
||||||
ranking: string;
|
|
||||||
}[];
|
|
||||||
navigation: {
|
|
||||||
facetname: string;
|
|
||||||
displayname: string;
|
|
||||||
type: string;
|
|
||||||
min: string;
|
|
||||||
max: string;
|
|
||||||
mean: string;
|
|
||||||
elements: {
|
|
||||||
name: string;
|
|
||||||
count: string;
|
|
||||||
modifier: string;
|
|
||||||
url: string;
|
|
||||||
}[];
|
|
||||||
}[];
|
|
||||||
}[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchYaCy = async (query: string, numResults: number = 20) => {
|
|
||||||
try {
|
|
||||||
const yacyBaseUrl = getYacyJsonEndpoint();
|
|
||||||
|
|
||||||
const url = new URL(`${yacyBaseUrl}/yacysearch.json`);
|
|
||||||
url.searchParams.append('query', query);
|
|
||||||
url.searchParams.append('count', numResults.toString());
|
|
||||||
|
|
||||||
const res = await axios.get(url.toString());
|
|
||||||
|
|
||||||
const originalres = res.data as YaCySearchResult;
|
|
||||||
|
|
||||||
const results = originalres.channels[0].items.map((item) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.link,
|
|
||||||
content: item.description,
|
|
||||||
img_src: item.image || null,
|
|
||||||
pubDate: item.pubDate,
|
|
||||||
host: item.host,
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { results, originalres };
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error.response?.data
|
|
||||||
? JSON.stringify(error.response.data, null, 2)
|
|
||||||
: error.message || 'Unknown error';
|
|
||||||
throw new Error(`YaCy Error: ${errorMessage}`);
|
|
||||||
}
|
|
||||||
};
|
|
src/lib/searxng.ts

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getSearxngApiEndpoint } from '../../config';
+import { getSearxngApiEndpoint } from '../config';

 interface SearxngSearchOptions {
   categories?: string[];
src/routes/config.ts

@@ -6,6 +7,7 @@ import {
 import {
   getGroqApiKey,
   getOllamaApiEndpoint,
+  getLMStudioApiEndpoint,
   getAnthropicApiKey,
   getGeminiApiKey,
   getOpenaiApiKey,
@@ -13,16 +14,6 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiApiKey,
   getCustomOpenaiModelName,
-  getSearchEngineBackend,
-  getImageSearchEngineBackend,
-  getVideoSearchEngineBackend,
-  getNewsSearchEngineBackend,
-  getSearxngApiEndpoint,
-  getGoogleApiKey,
-  getGoogleCseId,
-  getBingSubscriptionKey,
-  getBraveApiKey,
-  getYacyJsonEndpoint,
 } from '../config';
 import logger from '../utils/logger';

@@ -64,27 +55,13 @@ router.get('/', async (_, res) => {

     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
     config['geminiApiKey'] = getGeminiApiKey();
     config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
     config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
-    config['customOpenaiModelName'] = getCustomOpenaiModelName();
+    config['customOpenaiModelName'] = getCustomOpenaiModelName()

-    // Add search engine configuration
-    config['searchEngineBackends'] = {
-      search: getSearchEngineBackend(),
-      image: getImageSearchEngineBackend(),
-      video: getVideoSearchEngineBackend(),
-      news: getNewsSearchEngineBackend(),
-    };
-
-    config['searxngEndpoint'] = getSearxngApiEndpoint();
-    config['googleApiKey'] = getGoogleApiKey();
-    config['googleCseId'] = getGoogleCseId();
-    config['bingSubscriptionKey'] = getBingSubscriptionKey();
-    config['braveApiKey'] = getBraveApiKey();
-    config['yacyEndpoint'] = getYacyJsonEndpoint();

     res.status(200).json(config);
   } catch (err: any) {
@@ -113,36 +90,15 @@ router.post('/', async (req, res) => {
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
       },
+      LMSTUDIO: {
+        API_URL: config.lmStudioApiUrl,
+      },
       CUSTOM_OPENAI: {
         API_URL: config.customOpenaiApiUrl,
         API_KEY: config.customOpenaiApiKey,
         MODEL_NAME: config.customOpenaiModelName,
       },
     },
-    SEARCH_ENGINE_BACKENDS: config.searchEngineBackends ? {
-      SEARCH: config.searchEngineBackends.search,
-      IMAGE: config.searchEngineBackends.image,
-      VIDEO: config.searchEngineBackends.video,
-      NEWS: config.searchEngineBackends.news,
-    } : undefined,
-    SEARCH_ENGINES: {
-      GOOGLE: {
-        API_KEY: config.googleApiKey,
-        CSE_ID: config.googleCseId,
-      },
-      SEARXNG: {
-        ENDPOINT: config.searxngEndpoint,
-      },
-      BING: {
-        SUBSCRIPTION_KEY: config.bingSubscriptionKey,
-      },
-      BRAVE: {
-        API_KEY: config.braveApiKey,
-      },
-      YACY: {
-        ENDPOINT: config.yacyEndpoint,
-      },
-    },
   };

   updateConfig(updatedConfig);
@ -1,125 +1,42 @@
|
|||||||
import express from 'express';
|
import express from 'express';
|
||||||
import { searchSearxng } from '../lib/searchEngines/searxng';
|
import { searchSearxng } from '../lib/searxng';
|
||||||
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
|
|
||||||
import { searchBraveAPI } from '../lib/searchEngines/brave';
|
|
||||||
import { searchYaCy } from '../lib/searchEngines/yacy';
|
|
||||||
import { searchBingAPI } from '../lib/searchEngines/bing';
|
|
||||||
import { getNewsSearchEngineBackend } from '../config';
|
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
async function performSearch(query: string, site: string) {
|
|
||||||
const searchEngine = getNewsSearchEngineBackend();
|
|
||||||
switch (searchEngine) {
|
|
||||||
case 'google': {
|
|
||||||
const googleResult = await searchGooglePSE(query);
|
|
||||||
|
|
||||||
return googleResult.originalres.map((item) => {
|
|
||||||
const imageSources = [
|
|
||||||
item.pagemap?.cse_image?.[0]?.src,
|
|
||||||
item.pagemap?.cse_thumbnail?.[0]?.src,
|
|
||||||
item.pagemap?.metatags?.[0]?.['og:image'],
|
|
||||||
item.pagemap?.metatags?.[0]?.['twitter:image'],
|
|
||||||
item.pagemap?.metatags?.[0]?.['image'],
|
|
||||||
].filter(Boolean); // Remove undefined values
|
|
||||||
|
|
||||||
return {
|
|
||||||
title: item.title,
|
|
||||||
url: item.link,
|
|
||||||
content: item.snippet,
|
|
||||||
thumbnail: imageSources[0], // First available image
|
|
||||||
img_src: imageSources[0], // Same as thumbnail for consistency
|
|
||||||
iframe_src: null,
|
|
||||||
author: item.pagemap?.metatags?.[0]?.['og:site_name'] || site,
|
|
||||||
publishedDate:
|
|
||||||
item.pagemap?.metatags?.[0]?.['article:published_time'],
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'searxng': {
|
|
||||||
const searxResult = await searchSearxng(query, {
|
|
||||||
engines: ['bing news'],
|
|
||||||
pageno: 1,
|
|
||||||
});
|
|
||||||
return searxResult.results;
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'brave': {
|
|
||||||
const braveResult = await searchBraveAPI(query);
|
|
||||||
return braveResult.results.map((item) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.url,
|
|
||||||
content: item.content,
|
|
||||||
thumbnail: item.img_src,
|
|
||||||
img_src: item.img_src,
|
|
||||||
iframe_src: null,
|
|
||||||
author: item.meta?.fetched || site,
|
|
||||||
publishedDate: item.meta?.lastCrawled,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'yacy': {
|
|
||||||
const yacyResult = await searchYaCy(query);
|
|
||||||
return yacyResult.results.map((item) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.url,
|
|
||||||
content: item.content,
|
|
||||||
thumbnail: item.img_src,
|
|
||||||
img_src: item.img_src,
|
|
||||||
iframe_src: null,
|
|
||||||
author: item?.host || site,
|
|
||||||
publishedDate: item?.pubDate,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
case 'bing': {
|
|
||||||
const bingResult = await searchBingAPI(query);
|
|
||||||
return bingResult.results.map((item) => ({
|
|
||||||
title: item.title,
|
|
||||||
url: item.url,
|
|
||||||
content: item.content,
|
|
||||||
thumbnail: item.img_src,
|
|
||||||
img_src: item.img_src,
|
|
||||||
iframe_src: null,
|
|
||||||
author: item?.publisher || site,
|
|
||||||
publishedDate: item?.datePublished,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown search engine ${searchEngine}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
router.get('/', async (req, res) => {
|
router.get('/', async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const queries = [
|
|
||||||
{ site: 'businessinsider.com', topic: 'AI' },
|
|
||||||
{ site: 'www.exchangewire.com', topic: 'AI' },
|
|
||||||
{ site: 'yahoo.com', topic: 'AI' },
|
|
||||||
{ site: 'businessinsider.com', topic: 'tech' },
|
|
||||||
{ site: 'www.exchangewire.com', topic: 'tech' },
|
|
||||||
{ site: 'yahoo.com', topic: 'tech' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const data = (
|
const data = (
|
||||||
await Promise.all(
|
await Promise.all([
|
||||||
queries.map(async ({ site, topic }) => {
|
searchSearxng('site:businessinsider.com AI', {
|
||||||
try {
|
engines: ['bing news'],
|
||||||
const query = `site:${site} ${topic}`;
|
pageno: 1,
|
||||||
return await performSearch(query, site);
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`Error searching ${site}: ${error.message}`);
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
}),
|
}),
|
||||||
)
|
searchSearxng('site:www.exchangewire.com AI', {
|
||||||
|
engines: ['bing news'],
|
||||||
|
pageno: 1,
|
||||||
|
}),
|
||||||
|
searchSearxng('site:yahoo.com AI', {
|
||||||
|
engines: ['bing news'],
|
||||||
|
pageno: 1,
|
||||||
|
}),
|
||||||
|
searchSearxng('site:businessinsider.com tech', {
|
||||||
|
engines: ['bing news'],
|
||||||
|
pageno: 1,
|
||||||
|
}),
|
||||||
|
searchSearxng('site:www.exchangewire.com tech', {
|
||||||
|
engines: ['bing news'],
|
||||||
|
pageno: 1,
|
||||||
|
}),
|
||||||
|
searchSearxng('site:yahoo.com tech', {
|
||||||
|
engines: ['bing news'],
|
||||||
|
pageno: 1,
|
||||||
|
}),
|
||||||
|
])
|
||||||
)
|
)
|
||||||
|
.map((result) => result.results)
|
||||||
.flat()
|
.flat()
|
||||||
.sort(() => Math.random() - 0.5)
|
.sort(() => Math.random() - 0.5);
|
||||||
.filter((item) => item.title && item.url && item.content);
|
|
||||||
|
|
||||||
return res.json({ blogs: data });
|
return res.json({ blogs: data });
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
@@ -5,17 +5,14 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '../config';
 
 const router = express.Router();
 
 interface ChatModel {
   provider: string;
   model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
 }
 
 interface ImageSearchBody {
@@ -47,12 +44,21 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
       llm = new ChatOpenAI({
-        modelName: getCustomOpenaiModelName(),
-        openAIApiKey: getCustomOpenaiApiKey(),
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
         temperature: 0.7,
         configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
+          baseURL: body.chatModel.customOpenAIBaseURL,
         },
       }) as unknown as BaseChatModel;
     } else if (
@@ -10,19 +10,14 @@ import {
 import { searchHandlers } from '../websocket/messageHandler';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { MetaSearchAgentType } from '../search/metaSearchAgent';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '../config';
 
 const router = express.Router();
 
 interface chatModel {
   provider: string;
   model: string;
-  customOpenAIKey?: string;
   customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
 }
 
 interface embeddingModel {
@@ -83,14 +78,21 @@ router.post('/', async (req, res) => {
     let embeddings: Embeddings | undefined;
 
     if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
       llm = new ChatOpenAI({
-        modelName: body.chatModel?.model || getCustomOpenaiModelName(),
-        openAIApiKey:
-          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
         temperature: 0.7,
         configuration: {
-          baseURL:
-            body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
+          baseURL: body.chatModel.customOpenAIBaseURL,
         },
       }) as unknown as BaseChatModel;
     } else if (
@@ -5,17 +5,14 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '../config';
 
 const router = express.Router();
 
 interface ChatModel {
   provider: string;
   model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
 }
 
 interface SuggestionsBody {
@@ -46,12 +43,21 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
       llm = new ChatOpenAI({
-        modelName: getCustomOpenaiModelName(),
-        openAIApiKey: getCustomOpenaiApiKey(),
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
         temperature: 0.7,
         configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
+          baseURL: body.chatModel.customOpenAIBaseURL,
         },
       }) as unknown as BaseChatModel;
     } else if (
@@ -5,17 +5,14 @@ import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import handleVideoSearch from '../chains/videoSearchAgent';
 import { ChatOpenAI } from '@langchain/openai';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '../config';
 
 const router = express.Router();
 
 interface ChatModel {
   provider: string;
   model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
 }
 
 interface VideoSearchBody {
@@ -47,12 +44,21 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
       llm = new ChatOpenAI({
-        modelName: getCustomOpenaiModelName(),
-        openAIApiKey: getCustomOpenaiApiKey(),
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
         temperature: 0.7,
         configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
+          baseURL: body.chatModel.customOpenAIBaseURL,
         },
       }) as unknown as BaseChatModel;
     } else if (
@@ -17,12 +17,7 @@ import LineListOutputParser from '../lib/outputParsers/listLineOutputParser';
 import LineOutputParser from '../lib/outputParsers/lineOutputParser';
 import { getDocumentsFromLinks } from '../utils/documents';
 import { Document } from 'langchain/document';
-import { searchSearxng } from '../lib/searchEngines/searxng';
-import { searchGooglePSE } from '../lib/searchEngines/google_pse';
-import { searchBingAPI } from '../lib/searchEngines/bing';
-import { searchBraveAPI } from '../lib/searchEngines/brave';
-import { searchYaCy } from '../lib/searchEngines/yacy';
-import { getSearchEngineBackend } from '../config';
+import { searchSearxng } from '../lib/searxng';
 import path from 'path';
 import fs from 'fs';
 import computeSimilarity from '../utils/computeSimilarity';
@@ -137,7 +132,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
 You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the 
 text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
 If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.
 
 - **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
 - **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
 - **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.
@@ -208,37 +203,10 @@ class MetaSearchAgent implements MetaSearchAgentType {
 
         return { query: question, docs: docs };
       } else {
-        const searchEngine = getSearchEngineBackend();
-
-        let res;
-        switch (searchEngine) {
-          case 'searxng':
-            res = await searchSearxng(question, {
-              language: 'en',
-              engines: this.config.activeEngines,
-            });
-            break;
-          case 'google':
-            res = await searchGooglePSE(question);
-            break;
-          case 'bing':
-            res = await searchBingAPI(question);
-            break;
-          case 'brave':
-            res = await searchBraveAPI(question);
-            break;
-          case 'yacy':
-            res = await searchYaCy(question);
-            break;
-          default:
-            throw new Error(`Unknown search engine ${searchEngine}`);
-        }
-
-        if (!res?.results) {
-          throw new Error(
-            `No results found for search engine: ${searchEngine}`,
-          );
-        }
+        const res = await searchSearxng(question, {
+          language: 'en',
+          engines: this.config.activeEngines,
+        });
 
         const documents = res.results.map(
           (result) =>
@@ -9,11 +9,6 @@ import type { Embeddings } from '@langchain/core/embeddings';
 import type { IncomingMessage } from 'http';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '../config';
 
 export const handleConnection = async (
   ws: WebSocket,
@@ -53,20 +48,14 @@ export const handleConnection = async (
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
     } else if (chatModelProvider == 'custom_openai') {
-      const customOpenaiApiKey = getCustomOpenaiApiKey();
-      const customOpenaiApiUrl = getCustomOpenaiApiUrl();
-      const customOpenaiModelName = getCustomOpenaiModelName();
-
-      if (customOpenaiApiKey && customOpenaiApiUrl && customOpenaiModelName) {
-        llm = new ChatOpenAI({
-          modelName: customOpenaiModelName,
-          openAIApiKey: customOpenaiApiKey,
-          temperature: 0.7,
-          configuration: {
-            baseURL: customOpenaiApiUrl,
-          },
-        }) as unknown as BaseChatModel;
-      }
+      llm = new ChatOpenAI({
+        modelName: chatModel,
+        openAIApiKey: searchParams.get('openAIApiKey'),
+        temperature: 0.7,
+        configuration: {
+          baseURL: searchParams.get('openAIBaseURL'),
+        },
+      }) as unknown as BaseChatModel;
    }
 
     if (
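A minimal sketch of how custom OpenAI credentials can travel in the WebSocket URL's query string, which is what the server-side searchParams.get(...) calls above imply. The client-side URL construction is not part of this excerpt, so buildWsUrl below is an illustrative assumption.

// Client side (assumed): put the locally stored values into the connection URL.
const buildWsUrl = (base: string, apiKey: string | null, baseURL: string | null): string => {
  const url = new URL(base);
  if (apiKey) url.searchParams.set('openAIApiKey', apiKey);
  if (baseURL) url.searchParams.set('openAIBaseURL', baseURL);
  return url.toString();
};

// Server side: recover the values from the incoming request URL, as the handler does.
const readCustomOpenAiParams = (requestUrl: string, host: string) => {
  const searchParams = new URL(requestUrl, `http://${host}`).searchParams;
  return {
    openAIApiKey: searchParams.get('openAIApiKey'),
    openAIBaseURL: searchParams.get('openAIBaseURL'),
  };
};

console.log(
  readCustomOpenAiParams(
    buildWsUrl('ws://localhost:3001', 'sk-test', 'https://api.openai.com/v1'),
    'localhost:3001',
  ),
);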
@@ -1,18 +1,27 @@
 {
   "compilerOptions": {
-    "lib": ["ESNext"],
-    "module": "Node16",
-    "moduleResolution": "Node16",
-    "target": "ESNext",
-    "outDir": "dist",
-    "sourceMap": false,
-    "esModuleInterop": true,
-    "experimentalDecorators": true,
-    "emitDecoratorMetadata": true,
-    "allowSyntheticDefaultImports": true,
+    "target": "es2018",
+    "lib": ["dom", "dom.iterable", "esnext"],
+    "allowJs": true,
     "skipLibCheck": true,
-    "skipDefaultLibCheck": true
+    "strict": true,
+    "noEmit": true,
+    "esModuleInterop": true,
+    "module": "esnext",
+    "moduleResolution": "bundler",
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "jsx": "preserve",
+    "incremental": true,
+    "plugins": [
+      {
+        "name": "next"
+      }
+    ],
+    "paths": {
+      "@/*": ["./*"]
+    }
   },
-  "include": ["src"],
-  "exclude": ["node_modules", "**/*.spec.ts"]
+  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+  "exclude": ["node_modules"]
 }
(File diff suppressed because it is too large.)
@@ -10,7 +10,7 @@ import { toast } from 'sonner';
 import { useSearchParams } from 'next/navigation';
 import { getSuggestions } from '@/lib/actions';
 import { Settings } from 'lucide-react';
-import Link from 'next/link';
+import SettingsDialog from './SettingsDialog';
 import NextError from 'next/error';
 
 export type Message = {
@@ -40,7 +40,6 @@ const useSocket = (
   const isCleaningUpRef = useRef(false);
   const MAX_RETRIES = 3;
   const INITIAL_BACKOFF = 1000; // 1 second
-  const isConnectionErrorRef = useRef(false);
 
   const getBackoffDelay = (retryCount: number) => {
     return Math.min(INITIAL_BACKOFF * Math.pow(2, retryCount), 10000); // Cap at 10 seconds
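As a worked example of the capped exponential backoff used by getBackoffDelay above (assuming the same 1 second initial delay and 10 second cap), the retry delays grow 1 s, 2 s, 4 s, 8 s and then stay pinned at 10 s:

const INITIAL_BACKOFF = 1000; // 1 second, as in the hook above
const MAX_BACKOFF = 10000; // 10 second cap

const getBackoffDelay = (retryCount: number): number =>
  Math.min(INITIAL_BACKOFF * Math.pow(2, retryCount), MAX_BACKOFF);

// Delays for the first six attempts: 1000, 2000, 4000, 8000, 10000, 10000 ms.
console.log([0, 1, 2, 3, 4, 5].map(getBackoffDelay));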
@@ -59,17 +58,14 @@ const useSocket = (
         let embeddingModelProvider = localStorage.getItem(
           'embeddingModelProvider',
         );
-
-        const autoImageSearch = localStorage.getItem('autoImageSearch');
-        const autoVideoSearch = localStorage.getItem('autoVideoSearch');
-
-        if (!autoImageSearch) {
-          localStorage.setItem('autoImageSearch', 'true');
-        }
-
-        if (!autoVideoSearch) {
-          localStorage.setItem('autoVideoSearch', 'false');
-        }
+        let openAIBaseURL =
+          chatModelProvider === 'custom_openai'
+            ? localStorage.getItem('openAIBaseURL')
+            : null;
+        let openAIPIKey =
+          chatModelProvider === 'custom_openai'
+            ? localStorage.getItem('openAIApiKey')
+            : null;
 
         const providers = await fetch(
           `${process.env.NEXT_PUBLIC_API_URL}/models`,
@@ -98,13 +94,21 @@ const useSocket = (
           chatModelProvider =
             chatModelProvider || Object.keys(chatModelProviders)[0];
 
-          chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+          if (chatModelProvider === 'custom_openai') {
+            toast.error(
+              'Seems like you are using the custom OpenAI provider, please open the settings and enter a model name to use.',
+            );
+            setError(true);
+            return;
+          } else {
+            chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
 
           if (
             !chatModelProviders ||
             Object.keys(chatModelProviders).length === 0
           )
             return toast.error('No chat models available');
+          }
         }
@@ -135,7 +139,9 @@ const useSocket = (
 
         if (
           Object.keys(chatModelProviders).length > 0 &&
-          !chatModelProviders[chatModelProvider]
+          (((!openAIBaseURL || !openAIPIKey) &&
+            chatModelProvider === 'custom_openai') ||
+            !chatModelProviders[chatModelProvider])
         ) {
           const chatModelProvidersKeys = Object.keys(chatModelProviders);
           chatModelProvider =
@@ -143,11 +149,23 @@ const useSocket = (
             (key) => Object.keys(chatModelProviders[key]).length > 0,
           ) || chatModelProvidersKeys[0];
 
+          if (
+            chatModelProvider === 'custom_openai' &&
+            (!openAIBaseURL || !openAIPIKey)
+          ) {
+            toast.error(
+              'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
+            );
+            setError(true);
+            return;
+          }
+
           localStorage.setItem('chatModelProvider', chatModelProvider);
         }
 
         if (
           chatModelProvider &&
+          (!openAIBaseURL || !openAIPIKey) &&
           !chatModelProviders[chatModelProvider][chatModel]
         ) {
           chatModel = Object.keys(
@@ -233,8 +251,6 @@ const useSocket = (
             console.debug(new Date(), 'ws:connected');
           }
           if (data.type === 'error') {
-            isConnectionErrorRef.current = true;
-            setError(true);
             toast.error(data.data);
           }
         });
@@ -249,7 +265,7 @@ const useSocket = (
         clearTimeout(timeoutId);
         setIsWSReady(false);
         console.debug(new Date(), 'ws:disconnected');
-        if (!isCleaningUpRef.current && !isConnectionErrorRef.current) {
+        if (!isCleaningUpRef.current) {
           toast.error('Connection lost. Attempting to reconnect...');
           attemptReconnect();
         }
@@ -579,17 +595,6 @@ const ChatWindow = ({ id }: { id?: string }) => {
           }),
         );
       }
-
-      const autoImageSearch = localStorage.getItem('autoImageSearch');
-      const autoVideoSearch = localStorage.getItem('autoVideoSearch');
-
-      if (autoImageSearch === 'true') {
-        document.getElementById('search-images')?.click();
-      }
-
-      if (autoVideoSearch === 'true') {
-        document.getElementById('search-videos')?.click();
-      }
     }
   };
 
@@ -624,15 +629,17 @@ const ChatWindow = ({ id }: { id?: string }) => {
     return (
       <div className="relative">
         <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
-          <Link href="/settings">
-            <Settings className="cursor-pointer lg:hidden" />
-          </Link>
+          <Settings
+            className="cursor-pointer lg:hidden"
+            onClick={() => setIsSettingsOpen(true)}
+          />
         </div>
         <div className="flex flex-col items-center justify-center min-h-screen">
           <p className="dark:text-white/70 text-black/70 text-sm">
             Failed to connect to the server. Please try again later.
           </p>
         </div>
+        <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
       </div>
     );
   }
@@ -1,8 +1,8 @@
 import { Settings } from 'lucide-react';
 import EmptyChatMessageInput from './EmptyChatMessageInput';
+import SettingsDialog from './SettingsDialog';
 import { useState } from 'react';
 import { File } from './ChatWindow';
-import Link from 'next/link';
 
 const EmptyChat = ({
   sendMessage,
@@ -29,10 +29,12 @@ const EmptyChat = ({
 
   return (
     <div className="relative">
+      <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
       <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
-        <Link href="/settings">
-          <Settings className="cursor-pointer lg:hidden" />
-        </Link>
+        <Settings
+          className="cursor-pointer lg:hidden"
+          onClick={() => setIsSettingsOpen(true)}
+        />
       </div>
       <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
         <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
@@ -11,6 +11,8 @@ import {
   StopCircle,
   Layers3,
   Plus,
+  Brain,
+  ChevronDown,
 } from 'lucide-react';
 import Markdown from 'markdown-to-jsx';
 import Copy from './MessageActions/Copy';
@@ -41,26 +43,58 @@ const MessageBox = ({
 }) => {
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
+  const [thinking, setThinking] = useState<string>('');
+  const [answer, setAnswer] = useState<string>('');
+  const [isThinkingExpanded, setIsThinkingExpanded] = useState(false);
 
   useEffect(() => {
     const regex = /\[(\d+)\]/g;
+    const thinkRegex = /<think>(.*?)(?:<\/think>|$)(.*)/s;
 
-    if (
-      message.role === 'assistant' &&
-      message?.sources &&
-      message.sources.length > 0
-    ) {
-      return setParsedMessage(
-        message.content.replace(
-          regex,
-          (_, number) =>
-            `<a href="${message.sources?.[number - 1]?.metadata?.url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
-        ),
-      );
+    // Check for thinking content, including partial tags
+    const match = message.content.match(thinkRegex);
+    if (match) {
+      const [_, thinkingContent, answerContent] = match;
+
+      // Set thinking content even if </think> hasn't appeared yet
+      if (thinkingContent) {
+        setThinking(thinkingContent.trim());
+        setIsThinkingExpanded(true); // Auto-expand when thinking starts
+      }
+
+      // Only set answer content if we have it (after </think>)
+      if (answerContent) {
+        setAnswer(answerContent.trim());
+
+        // Process the answer part for sources if needed
+        if (message.role === 'assistant' && message?.sources && message.sources.length > 0) {
+          setParsedMessage(
+            answerContent.trim().replace(
+              regex,
+              (_, number) =>
+                `<a href="${message.sources?.[number - 1]?.metadata?.url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
+            ),
+          );
+        } else {
+          setParsedMessage(answerContent.trim());
+        }
+        setSpeechMessage(answerContent.trim().replace(regex, ''));
+      }
+    } else {
+      // No thinking content - process as before
+      if (message.role === 'assistant' && message?.sources && message.sources.length > 0) {
+        setParsedMessage(
+          message.content.replace(
+            regex,
+            (_, number) =>
+              `<a href="${message.sources?.[number - 1]?.metadata?.url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
+          ),
+        );
+      } else {
+        setParsedMessage(message.content);
+      }
+      setSpeechMessage(message.content.replace(regex, ''));
     }
-
-    setSpeechMessage(message.content.replace(regex, ''));
-    setParsedMessage(message.content);
   }, [message.content, message.sources, message.role]);
 
   const { speechStatus, start, stop } = useSpeech({ text: speechMessage });
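To make the parsing above concrete, here is a small standalone example of how the /<think>(.*?)(?:<\/think>|$)(.*)/s regex splits a streamed message: the first capture group is the reasoning text, available even before the closing tag has arrived, and the second is the answer that follows </think>. The sample strings are made up for illustration.

const thinkRegex = /<think>(.*?)(?:<\/think>|$)(.*)/s;

const split = (content: string) => {
  const match = content.match(thinkRegex);
  if (!match) return { thinking: '', answer: content };
  const [, thinking, answer] = match;
  return { thinking: thinking.trim(), answer: (answer ?? '').trim() };
};

// Complete message: both parts are present.
console.log(split('<think>Compare the two sources first.</think>The answer is 42. [1]'));
// Partial stream: the closing tag has not arrived yet, so only `thinking` is filled.
console.log(split('<think>Still reasoning about the quest'));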
@@ -68,13 +102,7 @@ const MessageBox = ({
   return (
     <div>
       {message.role === 'user' && (
-        <div
-          className={cn(
-            'w-full',
-            messageIndex === 0 ? 'pt-16' : 'pt-8',
-            'break-words',
-          )}
-        >
+        <div className={cn('w-full', messageIndex === 0 ? 'pt-16' : 'pt-8')}>
           <h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
             {message.content}
           </h2>
@@ -98,27 +126,71 @@ const MessageBox = ({
               <MessageSources sources={message.sources} />
             </div>
           )}
-          <div className="flex flex-col space-y-2">
-            <div className="flex flex-row items-center space-x-2">
-              <Disc3
-                className={cn(
-                  'text-black dark:text-white',
-                  isLast && loading ? 'animate-spin' : 'animate-none',
+          <div className="flex flex-col space-y-4">
+            {thinking && (
+              <div className="flex flex-col space-y-2 mb-4">
+                <button
+                  onClick={() => setIsThinkingExpanded(!isThinkingExpanded)}
+                  className="flex flex-row items-center space-x-2 group text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white transition duration-200"
+                >
+                  <Brain size={20} />
+                  <h3 className="font-medium text-xl">Reasoning</h3>
+                  <ChevronDown
+                    size={16}
+                    className={cn(
+                      "transition-transform duration-200",
+                      isThinkingExpanded ? "rotate-180" : ""
+                    )}
+                  />
+                </button>
+
+                {isThinkingExpanded && (
+                  <div className="rounded-lg bg-light-secondary/50 dark:bg-dark-secondary/50 p-4">
+                    {thinking.split('\n\n').map((paragraph, index) => {
+                      if (!paragraph.trim()) return null;
+
+                      const content = paragraph.replace(/^[•\-\d.]\s*/, '');
+
+                      return (
+                        <div key={index} className="mb-2 last:mb-0">
+                          <details className="group [&_summary::-webkit-details-marker]:hidden">
+                            <summary className="flex items-center cursor-pointer list-none text-sm text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white">
+                              <span className="arrow mr-2 inline-block transition-transform duration-200 group-open:rotate-90 group-open:self-start group-open:mt-1">▸</span>
+                              <p className="relative whitespace-normal line-clamp-1 group-open:line-clamp-none after:content-['...'] after:inline group-open:after:hidden transition-all duration-200 text-ellipsis overflow-hidden group-open:overflow-visible">
+                                {content}
+                              </p>
+                            </summary>
+                          </details>
+                        </div>
+                      );
+                    })}
+                  </div>
                 )}
-                size={20}
-              />
-              <h3 className="text-black dark:text-white font-medium text-xl">
-                Answer
-              </h3>
+              </div>
+            )}
+
+            <div className="flex flex-col space-y-2">
+              <div className="flex flex-row items-center space-x-2">
+                <Disc3
+                  className={cn(
+                    'text-black dark:text-white',
+                    isLast && loading ? 'animate-spin' : 'animate-none',
+                  )}
+                  size={20}
+                />
+                <h3 className="text-black dark:text-white font-medium text-xl">
+                  Answer
+                </h3>
+              </div>
+              <Markdown
+                className={cn(
+                  'prose prose-h1:mb-3 prose-h2:mb-2 prose-h2:mt-6 prose-h2:font-[800] prose-h3:mt-4 prose-h3:mb-1.5 prose-h3:font-[600] dark:prose-invert prose-p:leading-relaxed prose-pre:p-0 font-[400]',
+                  'max-w-none break-words text-black dark:text-white',
+                )}
+              >
+                {parsedMessage}
+              </Markdown>
             </div>
-            <Markdown
-              className={cn(
-                'prose prose-h1:mb-3 prose-h2:mb-2 prose-h2:mt-6 prose-h2:font-[800] prose-h3:mt-4 prose-h3:mb-1.5 prose-h3:font-[600] dark:prose-invert prose-p:leading-relaxed prose-pre:p-0 font-[400]',
-                'max-w-none break-words text-black dark:text-white',
-              )}
-            >
-              {parsedMessage}
-            </Markdown>
             {loading && isLast ? null : (
               <div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4 -mx-2">
                 <div className="flex flex-row items-center space-x-1">
@@ -110,7 +110,7 @@ const Attach = ({
             <button
               type="button"
               onClick={() => fileInputRef.current.click()}
-              className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
+              className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
             >
               <input
                 type="file"
@@ -128,7 +128,7 @@ const Attach = ({
                 setFiles([]);
                 setFileIds([]);
               }}
-              className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
+              className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
             >
               <Trash size={14} />
               <p className="text-xs">Clear</p>
@@ -145,7 +145,7 @@ const Attach = ({
               <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
                 <File size={16} className="text-white/70" />
               </div>
-              <p className="text-black/70 dark:text-white/70 text-sm">
+              <p className="text-white/70 text-sm">
                 {file.fileName.length > 25
                   ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                     '...' +
@@ -82,7 +82,7 @@ const AttachSmall = ({
             <button
               type="button"
               onClick={() => fileInputRef.current.click()}
-              className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
+              className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
             >
               <input
                 type="file"
@@ -100,7 +100,7 @@ const AttachSmall = ({
                 setFiles([]);
                 setFileIds([]);
               }}
-              className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
+              className="flex flex-row items-center space-x-1 text-white/70 hover:text-white transition duration-200"
             >
               <Trash size={14} />
               <p className="text-xs">Clear</p>
@@ -117,7 +117,7 @@ const AttachSmall = ({
               <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
                 <File size={16} className="text-white/70" />
               </div>
-              <p className="text-black/70 dark:text-white/70 text-sm">
+              <p className="text-white/70 text-sm">
                 {file.fileName.length > 25
                   ? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
                     '...' +
@@ -27,7 +27,6 @@ const SearchImages = ({
     <>
       {!loading && images === null && (
         <button
-          id="search-images"
           onClick={async () => {
             setLoading(true);
 
@@ -42,7 +42,6 @@ const Searchvideos = ({
     <>
       {!loading && videos === null && (
         <button
-          id="search-videos"
          onClick={async () => {
            setLoading(true);
 
ui/components/SettingsDialog.tsx (new file, 528 lines)
@@ -0,0 +1,528 @@
+import { cn } from '@/lib/utils';
+import {
+  Dialog,
+  DialogPanel,
+  DialogTitle,
+  Transition,
+  TransitionChild,
+} from '@headlessui/react';
+import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
+import React, {
+  Fragment,
+  useEffect,
+  useState,
+  type SelectHTMLAttributes,
+} from 'react';
+import ThemeSwitcher from './theme/Switcher';
+
+interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
+
+const Input = ({ className, ...restProps }: InputProps) => {
+  return (
+    <input
+      {...restProps}
+      className={cn(
+        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
+        className,
+      )}
+    />
+  );
+};
+
+interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
+  options: { value: string; label: string; disabled?: boolean }[];
+}
+
+export const Select = ({ className, options, ...restProps }: SelectProps) => {
+  return (
+    <select
+      {...restProps}
+      className={cn(
+        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
+        className,
+      )}
+    >
+      {options.map(({ label, value, disabled }) => {
+        return (
+          <option key={value} value={value} disabled={disabled}>
+            {label}
+          </option>
+        );
+      })}
+    </select>
+  );
+};
+
+interface SettingsType {
+  chatModelProviders: {
+    [key: string]: [Record<string, any>];
+  };
+  embeddingModelProviders: {
+    [key: string]: [Record<string, any>];
+  };
+  openaiApiKey: string;
+  groqApiKey: string;
+  anthropicApiKey: string;
+  geminiApiKey: string;
+  ollamaApiUrl: string;
+}
+
+const SettingsDialog = ({
+  isOpen,
+  setIsOpen,
+}: {
+  isOpen: boolean;
+  setIsOpen: (isOpen: boolean) => void;
+}) => {
+  const [config, setConfig] = useState<SettingsType | null>(null);
+  const [chatModels, setChatModels] = useState<Record<string, any>>({});
+  const [embeddingModels, setEmbeddingModels] = useState<Record<string, any>>(
+    {},
+  );
+  const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
+    string | null
+  >(null);
+  const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
+    null,
+  );
+  const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
+    useState<string | null>(null);
+  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
+    string | null
+  >(null);
+  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
+  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
+  const [isLoading, setIsLoading] = useState(false);
+  const [isUpdating, setIsUpdating] = useState(false);
+
+  useEffect(() => {
+    if (isOpen) {
+      const fetchConfig = async () => {
+        setIsLoading(true);
+        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+          headers: {
+            'Content-Type': 'application/json',
+          },
+        });
+
+        const data = (await res.json()) as SettingsType;
+        setConfig(data);
+
+        const chatModelProvidersKeys = Object.keys(
+          data.chatModelProviders || {},
+        );
+        const embeddingModelProvidersKeys = Object.keys(
+          data.embeddingModelProviders || {},
+        );
+
+        const defaultChatModelProvider =
+          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
+        const defaultEmbeddingModelProvider =
+          embeddingModelProvidersKeys.length > 0
+            ? embeddingModelProvidersKeys[0]
+            : '';
+
+        const chatModelProvider =
+          localStorage.getItem('chatModelProvider') ||
+          defaultChatModelProvider ||
+          '';
+        const chatModel =
+          localStorage.getItem('chatModel') ||
+          (data.chatModelProviders &&
+          data.chatModelProviders[chatModelProvider]?.length > 0
+            ? data.chatModelProviders[chatModelProvider][0].name
+            : undefined) ||
+          '';
+        const embeddingModelProvider =
+          localStorage.getItem('embeddingModelProvider') ||
+          defaultEmbeddingModelProvider ||
+          '';
+        const embeddingModel =
+          localStorage.getItem('embeddingModel') ||
+          (data.embeddingModelProviders &&
+            data.embeddingModelProviders[embeddingModelProvider]?.[0].name) ||
+          '';
+
+        setSelectedChatModelProvider(chatModelProvider);
+        setSelectedChatModel(chatModel);
+        setSelectedEmbeddingModelProvider(embeddingModelProvider);
+        setSelectedEmbeddingModel(embeddingModel);
+        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
+        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
+        setChatModels(data.chatModelProviders || {});
+        setEmbeddingModels(data.embeddingModelProviders || {});
+        setIsLoading(false);
+      };
+
+      fetchConfig();
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [isOpen]);
+
+  const handleSubmit = async () => {
+    setIsUpdating(true);
+
+    try {
+      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify(config),
+      });
+
+      localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
+      localStorage.setItem('chatModel', selectedChatModel!);
+      localStorage.setItem(
+        'embeddingModelProvider',
+        selectedEmbeddingModelProvider!,
+      );
+      localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
+      localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
+      localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
+    } catch (err) {
+      console.log(err);
+    } finally {
+      setIsUpdating(false);
+      setIsOpen(false);
+
+      window.location.reload();
+    }
+  };
+
+  return (
+    <Transition appear show={isOpen} as={Fragment}>
+      <Dialog
+        as="div"
+        className="relative z-50"
+        onClose={() => setIsOpen(false)}
+      >
+        <TransitionChild
+          as={Fragment}
+          enter="ease-out duration-300"
+          enterFrom="opacity-0"
+          enterTo="opacity-100"
+          leave="ease-in duration-200"
+          leaveFrom="opacity-100"
+          leaveTo="opacity-0"
+        >
+          <div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
+        </TransitionChild>
+        <div className="fixed inset-0 overflow-y-auto">
+          <div className="flex min-h-full items-center justify-center p-4 text-center">
+            <TransitionChild
+              as={Fragment}
+              enter="ease-out duration-200"
+              enterFrom="opacity-0 scale-95"
+              enterTo="opacity-100 scale-100"
+              leave="ease-in duration-100"
+              leaveFrom="opacity-100 scale-200"
+              leaveTo="opacity-0 scale-95"
+            >
+              <DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
+                <DialogTitle className="text-xl font-medium leading-6 dark:text-white">
+                  Settings
+                </DialogTitle>
+                {config && !isLoading && (
+                  <div className="flex flex-col space-y-4 mt-6">
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        Theme
+                      </p>
+                      <ThemeSwitcher />
+                    </div>
+                    {config.chatModelProviders && (
+                      <div className="flex flex-col space-y-1">
+                        <p className="text-black/70 dark:text-white/70 text-sm">
+                          Chat model Provider
+                        </p>
+                        <Select
+                          value={selectedChatModelProvider ?? undefined}
+                          onChange={(e) => {
+                            setSelectedChatModelProvider(e.target.value);
+                            if (e.target.value === 'custom_openai') {
+                              setSelectedChatModel('');
+                            } else {
+                              setSelectedChatModel(
+                                config.chatModelProviders[e.target.value][0]
+                                  .name,
+                              );
+                            }
+                          }}
+                          options={Object.keys(config.chatModelProviders).map(
+                            (provider) => ({
+                              value: provider,
+                              label:
+                                provider.charAt(0).toUpperCase() +
+                                provider.slice(1),
+                            }),
+                          )}
+                        />
+                      </div>
+                    )}
+                    {selectedChatModelProvider &&
+                      selectedChatModelProvider != 'custom_openai' && (
+                        <div className="flex flex-col space-y-1">
+                          <p className="text-black/70 dark:text-white/70 text-sm">
+                            Chat Model
+                          </p>
+                          <Select
+                            value={selectedChatModel ?? undefined}
+                            onChange={(e) =>
+                              setSelectedChatModel(e.target.value)
+                            }
+                            options={(() => {
+                              const chatModelProvider =
+                                config.chatModelProviders[
+                                  selectedChatModelProvider
+                                ];
+
+                              return chatModelProvider
+                                ? chatModelProvider.length > 0
+                                  ? chatModelProvider.map((model) => ({
+                                      value: model.name,
+                                      label: model.displayName,
+                                    }))
+                                  : [
+                                      {
+                                        value: '',
+                                        label: 'No models available',
+                                        disabled: true,
+                                      },
+                                    ]
+                                : [
+                                    {
+                                      value: '',
+                                      label:
+                                        'Invalid provider, please check backend logs',
+                                      disabled: true,
+                                    },
+                                  ];
+                            })()}
+                          />
+                        </div>
+                      )}
+                    {selectedChatModelProvider &&
+                      selectedChatModelProvider === 'custom_openai' && (
+                        <>
+                          <div className="flex flex-col space-y-1">
+                            <p className="text-black/70 dark:text-white/70 text-sm">
+                              Model name
+                            </p>
+                            <Input
+                              type="text"
+                              placeholder="Model name"
+                              defaultValue={selectedChatModel!}
+                              onChange={(e) =>
+                                setSelectedChatModel(e.target.value)
+                              }
+                            />
+                          </div>
+                          <div className="flex flex-col space-y-1">
+                            <p className="text-black/70 dark:text-white/70 text-sm">
+                              Custom OpenAI API Key
+                            </p>
+                            <Input
+                              type="text"
+                              placeholder="Custom OpenAI API Key"
+                              defaultValue={customOpenAIApiKey!}
+                              onChange={(e) =>
+                                setCustomOpenAIApiKey(e.target.value)
+                              }
+                            />
+                          </div>
+                          <div className="flex flex-col space-y-1">
+                            <p className="text-black/70 dark:text-white/70 text-sm">
+                              Custom OpenAI Base URL
+                            </p>
+                            <Input
+                              type="text"
+                              placeholder="Custom OpenAI Base URL"
+                              defaultValue={customOpenAIBaseURL!}
+                              onChange={(e) =>
+                                setCustomOpenAIBaseURL(e.target.value)
+                              }
+                            />
+                          </div>
+                        </>
+                      )}
+                    {/* Embedding models */}
+                    {config.embeddingModelProviders && (
+                      <div className="flex flex-col space-y-1">
+                        <p className="text-black/70 dark:text-white/70 text-sm">
+                          Embedding model Provider
+                        </p>
+                        <Select
+                          value={selectedEmbeddingModelProvider ?? undefined}
+                          onChange={(e) => {
+                            setSelectedEmbeddingModelProvider(e.target.value);
+                            setSelectedEmbeddingModel(
+                              config.embeddingModelProviders[e.target.value][0]
+                                .name,
+                            );
+                          }}
+                          options={Object.keys(
+                            config.embeddingModelProviders,
+                          ).map((provider) => ({
+                            label:
+                              provider.charAt(0).toUpperCase() +
+                              provider.slice(1),
+                            value: provider,
+                          }))}
+                        />
+                      </div>
+                    )}
+                    {selectedEmbeddingModelProvider && (
+                      <div className="flex flex-col space-y-1">
+                        <p className="text-black/70 dark:text-white/70 text-sm">
+                          Embedding Model
+                        </p>
+                        <Select
+                          value={selectedEmbeddingModel ?? undefined}
+                          onChange={(e) =>
+                            setSelectedEmbeddingModel(e.target.value)
+                          }
+                          options={(() => {
+                            const embeddingModelProvider =
+                              config.embeddingModelProviders[
+                                selectedEmbeddingModelProvider
+                              ];
+
+                            return embeddingModelProvider
+                              ? embeddingModelProvider.length > 0
+                                ? embeddingModelProvider.map((model) => ({
+                                    label: model.displayName,
+                                    value: model.name,
+                                  }))
+                                : [
+                                    {
+                                      label: 'No embedding models available',
+                                      value: '',
+                                      disabled: true,
+                                    },
+                                  ]
+                              : [
+                                  {
+                                    label:
+                                      'Invalid provider, please check backend logs',
+                                    value: '',
+                                    disabled: true,
+                                  },
+                                ];
+                          })()}
+                        />
+                      </div>
+                    )}
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        OpenAI API Key
+                      </p>
+                      <Input
+                        type="text"
+                        placeholder="OpenAI API Key"
+                        defaultValue={config.openaiApiKey}
+                        onChange={(e) =>
+                          setConfig({
+                            ...config,
+                            openaiApiKey: e.target.value,
+                          })
+                        }
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        Ollama API URL
+                      </p>
+                      <Input
+                        type="text"
+                        placeholder="Ollama API URL"
+                        defaultValue={config.ollamaApiUrl}
+                        onChange={(e) =>
+                          setConfig({
+                            ...config,
+                            ollamaApiUrl: e.target.value,
+                          })
+                        }
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        GROQ API Key
+                      </p>
+                      <Input
+                        type="text"
+                        placeholder="GROQ API Key"
+                        defaultValue={config.groqApiKey}
+                        onChange={(e) =>
+                          setConfig({
+                            ...config,
+                            groqApiKey: e.target.value,
+                          })
+                        }
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        Anthropic API Key
+                      </p>
+                      <Input
+                        type="text"
+                        placeholder="Anthropic API key"
+                        defaultValue={config.anthropicApiKey}
+                        onChange={(e) =>
+                          setConfig({
+                            ...config,
+                            anthropicApiKey: e.target.value,
+                          })
+                        }
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-black/70 dark:text-white/70 text-sm">
+                        Gemini API Key
+                      </p>
+                      <Input
+                        type="text"
+                        placeholder="Gemini API key"
+                        defaultValue={config.geminiApiKey}
+                        onChange={(e) =>
+                          setConfig({
+                            ...config,
+                            geminiApiKey: e.target.value,
+                          })
+                        }
+                      />
+                    </div>
+                  </div>
+                )}
+                {isLoading && (
+                  <div className="w-full flex items-center justify-center mt-6 text-black/70 dark:text-white/70 py-6">
+                    <RefreshCcw className="animate-spin" />
+                  </div>
+                )}
+                <div className="w-full mt-6 space-y-2">
+                  <p className="text-xs text-black/50 dark:text-white/50">
+                    We'll refresh the page after updating the settings.
+                  </p>
+                  <button
+                    onClick={handleSubmit}
+                    className="bg-[#24A0ED] flex flex-row items-center space-x-2 text-white disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#ececec21] rounded-full px-4 py-2"
+                    disabled={isLoading || isUpdating}
+                  >
+                    {isUpdating ? (
+                      <RefreshCw size={20} className="animate-spin" />
+                    ) : (
+                      <CloudUpload size={20} />
+                    )}
+                  </button>
+                </div>
+              </DialogPanel>
+            </TransitionChild>
+          </div>
+        </div>
+      </Dialog>
+    </Transition>
+  );
+};
+
+export default SettingsDialog;
@@ -6,6 +6,7 @@ import Link from 'next/link';
 import { useSelectedLayoutSegments } from 'next/navigation';
 import React, { useState, type ReactNode } from 'react';
 import Layout from './Layout';
+import SettingsDialog from './SettingsDialog';
 
 const VerticalIconContainer = ({ children }: { children: ReactNode }) => {
   return (
@@ -66,9 +67,15 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
           ))}
         </VerticalIconContainer>
 
-        <Link href="/settings">
-          <Settings className="cursor-pointer" />
-        </Link>
+        <Settings
+          onClick={() => setIsSettingsOpen(!isSettingsOpen)}
+          className="cursor-pointer"
+        />
+
+        <SettingsDialog
+          isOpen={isSettingsOpen}
+          setIsOpen={setIsSettingsOpen}
+        />
       </div>
     </div>
 
@@ -1,7 +1,8 @@
 'use client';
 import { useTheme } from 'next-themes';
+import { SunIcon, MoonIcon, MonitorIcon } from 'lucide-react';
 import { useCallback, useEffect, useState } from 'react';
-import Select from '../ui/Select';
+import { Select } from '../SettingsDialog';
 
 type Theme = 'dark' | 'light' | 'system';
 
@@ -1,28 +0,0 @@
-import { cn } from '@/lib/utils';
-import { SelectHTMLAttributes } from 'react';
-
-interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
-  options: { value: string; label: string; disabled?: boolean }[];
-}
-
-export const Select = ({ className, options, ...restProps }: SelectProps) => {
-  return (
-    <select
-      {...restProps}
-      className={cn(
-        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
-        className,
-      )}
-    >
-      {options.map(({ label, value, disabled }) => {
-        return (
-          <option key={value} value={value} disabled={disabled}>
-            {label}
-          </option>
-        );
-      })}
-    </select>
-  );
-};
-
-export default Select;
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.10.0-rc3",
+  "version": "1.10.0-rc2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {