Mirror of https://github.com/ItzCrazyKns/Perplexica.git
Synced 2025-12-14 23:58:14 +00:00
Compare commits
18 Commits
| SHA1 |
|---|
| f88f179920 |
| 4cb0aeeee3 |
| e8fe74ae7c |
| ed47191d9b |
| b4d787d333 |
| 38b1995677 |
| f28257b480 |
| 9b088cd161 |
| 94ea6c372a |
| 6e61c88c9e |
| ba7b92ffde |
| f8fd2a6fb0 |
| 0440a810f5 |
| e3fef3a1be |
| 4bf69dfdda |
| 9f45ecb98d |
| c710f4f88c |
| 79f6a52b5b |
README.md | 11
@@ -10,6 +10,7 @@
 - [Installation](#installation)
   - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
   - [Non-Docker Installation](#non-docker-installation)
+  - [Ollama connection errors](#ollama-connection-errors)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
 - [Support Us](#support-us)
@@ -90,6 +91,16 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 
 **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
 
+#### Ollama connection errors
+
+If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following:
+
+On Windows: `http://host.docker.internal:11434`<br>
+On Mac: `http://host.docker.internal:11434`<br>
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434`
+
+You need to edit the ports accordingly.
+
 ## One-Click Deployment
 
 [](https://repocloud.io/details/?app_id=267)
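The backend verifies Ollama against the same `/api/tags` endpoint that the providers change below queries, so probing it directly is a quick way to confirm the URL entered in settings is reachable. A minimal sketch (assuming the Windows/Mac URL from the note above; swap in your own host and port):

```ts
// Connectivity probe for the Ollama API; URL is illustrative, not from the diff.
const res = await fetch('http://host.docker.internal:11434/api/tags', {
  headers: { 'Content-Type': 'application/json' },
});
console.log(res.ok ? 'Ollama reachable' : `HTTP ${res.status}`);
```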
package.json (backend):

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.2.0",
+  "version": "1.3.4",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -29,7 +29,6 @@
   "dotenv": "^16.4.5",
   "express": "^4.19.2",
   "langchain": "^0.1.30",
-  "react-text-to-speech": "^0.14.5",
   "winston": "^3.13.0",
   "ws": "^8.16.0",
   "zod": "^3.22.4"
Backend providers module:

@@ -8,7 +8,7 @@ import {
 } from '../config';
 import logger from '../utils/logger';
 
-export const getAvailableProviders = async () => {
+export const getAvailableChatModelProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
@@ -33,10 +33,6 @@ export const getAvailableChatModelProviders = async () => {
         modelName: 'gpt-4-turbo',
         temperature: 0.7,
       }),
-      embeddings: new OpenAIEmbeddings({
-        openAIApiKey,
-        modelName: 'text-embedding-3-large',
-      }),
     };
   } catch (err) {
     logger.error(`Error loading OpenAI models: ${err}`);
@@ -86,10 +82,6 @@ export const getAvailableChatModelProviders = async () => {
           baseURL: 'https://api.groq.com/openai/v1',
         },
       ),
-      embeddings: new OpenAIEmbeddings({
-        openAIApiKey: openAIApiKey,
-        modelName: 'text-embedding-3-large',
-      }),
     };
   } catch (err) {
     logger.error(`Error loading Groq models: ${err}`);
@@ -98,7 +90,11 @@ export const getAvailableChatModelProviders = async () => {
 
   if (ollamaEndpoint) {
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`);
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
 
       const { models: ollamaModels } = (await response.json()) as any;
 
@@ -110,17 +106,60 @@ export const getAvailableChatModelProviders = async () => {
         });
         return acc;
       }, {});
 
-      if (Object.keys(models['ollama']).length > 0) {
-        models['ollama']['embeddings'] = new OllamaEmbeddings({
-          baseUrl: ollamaEndpoint,
-          model: models['ollama'][Object.keys(models['ollama'])[0]].model,
-        });
-      }
     } catch (err) {
       logger.error(`Error loading Ollama models: ${err}`);
     }
   }
 
+  models['custom_openai'] = {};
+
+  return models;
+};
+
+export const getAvailableEmbeddingModelProviders = async () => {
+  const openAIApiKey = getOpenaiApiKey();
+  const ollamaEndpoint = getOllamaApiEndpoint();
+
+  const models = {};
+
+  if (openAIApiKey) {
+    try {
+      models['openai'] = {
+        'Text embedding 3 small': new OpenAIEmbeddings({
+          openAIApiKey,
+          modelName: 'text-embedding-3-small',
+        }),
+        'Text embedding 3 large': new OpenAIEmbeddings({
+          openAIApiKey,
+          modelName: 'text-embedding-3-large',
+        }),
+      };
+    } catch (err) {
+      logger.error(`Error loading OpenAI embeddings: ${err}`);
+    }
+  }
+
+  if (ollamaEndpoint) {
+    try {
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
+
+      const { models: ollamaModels } = (await response.json()) as any;
+
+      models['ollama'] = ollamaModels.reduce((acc, model) => {
+        acc[model.model] = new OllamaEmbeddings({
+          baseUrl: ollamaEndpoint,
+          model: model.model,
+        });
+        return acc;
+      }, {});
+    } catch (err) {
+      logger.error(`Error loading Ollama embeddings: ${err}`);
+    }
+  }
+
   return models;
 };
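Both loaders resolve to the same shape: provider name mapped to model display name mapped to a ready LangChain instance, so callers reach any model with two key lookups. A minimal consumption sketch (the `Promise.all` pairing mirrors the routes below; the actual model names depend on what each provider exposes at runtime):

```ts
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '../lib/providers';

// provider name -> model display name -> instantiated model
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
  getAvailableChatModelProviders(),
  getAvailableEmbeddingModelProviders(),
]);

// Default selection, done the same way the routes and websocket handler do it:
const provider = Object.keys(chatModelProviders)[0];
const chatModel = Object.keys(chatModelProviders[provider])[0];
```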
Config route:

@@ -1,5 +1,8 @@
 import express from 'express';
-import { getAvailableProviders } from '../lib/providers';
+import {
+  getAvailableChatModelProviders,
+  getAvailableEmbeddingModelProviders,
+} from '../lib/providers';
 import {
   getGroqApiKey,
   getOllamaApiEndpoint,
@@ -12,16 +15,24 @@ const router = express.Router();
 router.get('/', async (_, res) => {
   const config = {};
 
-  const providers = await getAvailableProviders();
+  const [chatModelProviders, embeddingModelProviders] = await Promise.all([
+    getAvailableChatModelProviders(),
+    getAvailableEmbeddingModelProviders(),
+  ]);
 
-  for (const provider in providers) {
-    delete providers[provider]['embeddings'];
+  config['chatModelProviders'] = {};
+  config['embeddingModelProviders'] = {};
+
+  for (const provider in chatModelProviders) {
+    config['chatModelProviders'][provider] = Object.keys(
+      chatModelProviders[provider],
+    );
   }
 
-  config['providers'] = {};
-
-  for (const provider in providers) {
-    config['providers'][provider] = Object.keys(providers[provider]);
+  for (const provider in embeddingModelProviders) {
+    config['embeddingModelProviders'][provider] = Object.keys(
+      embeddingModelProviders[provider],
+    );
   }
 
   config['openaiApiKey'] = getOpenaiApiKey();
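The endpoint now returns only model names grouped per provider, rather than deleting an `embeddings` entry out of a map of live instances. The resulting payload shape matches the `SettingsType` interface in the frontend diff further down (a sketch; the backend declares no such type in this diff):

```ts
// Shape of GET /config after this change:
type ConfigResponse = {
  chatModelProviders: { [provider: string]: string[] };
  embeddingModelProviders: { [provider: string]: string[] };
  openaiApiKey: string;
  // ...other keys (Groq, Ollama settings) continue as before
};
```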
Image search route:

@@ -1,7 +1,7 @@
 import express from 'express';
 import handleImageSearch from '../agents/imageSearchAgent';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { getAvailableProviders } from '../lib/providers';
+import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 
@@ -19,7 +19,7 @@ router.post('/', async (req, res) => {
     }
   });
 
-  const chatModels = await getAvailableProviders();
+  const chatModels = await getAvailableChatModelProviders();
   const provider = chat_model_provider || Object.keys(chatModels)[0];
   const chatModel = chat_model || Object.keys(chatModels[provider])[0];
 
Models route:

@@ -1,14 +1,20 @@
 import express from 'express';
 import logger from '../utils/logger';
-import { getAvailableProviders } from '../lib/providers';
+import {
+  getAvailableChatModelProviders,
+  getAvailableEmbeddingModelProviders,
+} from '../lib/providers';
 
 const router = express.Router();
 
 router.get('/', async (req, res) => {
   try {
-    const providers = await getAvailableProviders();
+    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
+      getAvailableChatModelProviders(),
+      getAvailableEmbeddingModelProviders(),
+    ]);
 
-    res.status(200).json({ providers });
+    res.status(200).json({ chatModelProviders, embeddingModelProviders });
   } catch (err) {
     res.status(500).json({ message: 'An error has occurred.' });
     logger.error(err.message);
Video search route:

@@ -1,6 +1,6 @@
 import express from 'express';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { getAvailableProviders } from '../lib/providers';
+import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import handleVideoSearch from '../agents/videoSearchAgent';
@@ -19,7 +19,7 @@ router.post('/', async (req, res) => {
     }
   });
 
-  const chatModels = await getAvailableProviders();
+  const chatModels = await getAvailableChatModelProviders();
   const provider = chat_model_provider || Object.keys(chatModels)[0];
   const chatModel = chat_model || Object.keys(chatModels[provider])[0];
 
Websocket connection handler:

@@ -1,37 +1,79 @@
 import { WebSocket } from 'ws';
 import { handleMessage } from './messageHandler';
-import { getAvailableProviders } from '../lib/providers';
+import {
+  getAvailableEmbeddingModelProviders,
+  getAvailableChatModelProviders,
+} from '../lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import type { Embeddings } from '@langchain/core/embeddings';
 import type { IncomingMessage } from 'http';
 import logger from '../utils/logger';
+import { ChatOpenAI } from '@langchain/openai';
 
 export const handleConnection = async (
   ws: WebSocket,
   request: IncomingMessage,
 ) => {
+  try {
     const searchParams = new URL(request.url, `http://${request.headers.host}`)
       .searchParams;
 
-    const models = await getAvailableProviders();
-    const provider =
-      searchParams.get('chatModelProvider') || Object.keys(models)[0];
-    const chatModel =
-      searchParams.get('chatModel') || Object.keys(models[provider])[0];
+    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
+      getAvailableChatModelProviders(),
+      getAvailableEmbeddingModelProviders(),
+    ]);
+
+    const chatModelProvider =
+      searchParams.get('chatModelProvider') ||
+      Object.keys(chatModelProviders)[0];
+    const chatModel =
+      searchParams.get('chatModel') ||
+      Object.keys(chatModelProviders[chatModelProvider])[0];
+
+    const embeddingModelProvider =
+      searchParams.get('embeddingModelProvider') ||
+      Object.keys(embeddingModelProviders)[0];
+    const embeddingModel =
+      searchParams.get('embeddingModel') ||
+      Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
 
     let llm: BaseChatModel | undefined;
     let embeddings: Embeddings | undefined;
 
-    if (models[provider] && models[provider][chatModel]) {
-      llm = models[provider][chatModel] as BaseChatModel | undefined;
-      embeddings = models[provider].embeddings as Embeddings | undefined;
+    if (
+      chatModelProviders[chatModelProvider] &&
+      chatModelProviders[chatModelProvider][chatModel] &&
+      chatModelProvider != 'custom_openai'
+    ) {
+      llm = chatModelProviders[chatModelProvider][chatModel] as
+        | BaseChatModel
+        | undefined;
+    } else if (chatModelProvider == 'custom_openai') {
+      llm = new ChatOpenAI({
+        modelName: chatModel,
+        openAIApiKey: searchParams.get('openAIApiKey'),
+        temperature: 0.7,
+        configuration: {
+          baseURL: searchParams.get('openAIBaseURL'),
+        },
+      });
+    }
+
+    if (
+      embeddingModelProviders[embeddingModelProvider] &&
+      embeddingModelProviders[embeddingModelProvider][embeddingModel]
+    ) {
+      embeddings = embeddingModelProviders[embeddingModelProvider][
+        embeddingModel
+      ] as Embeddings | undefined;
     }
 
     if (!llm || !embeddings) {
       ws.send(
         JSON.stringify({
           type: 'error',
-          data: 'Invalid LLM or embeddings model selected',
+          data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
+          key: 'INVALID_MODEL_SELECTED',
         }),
       );
       ws.close();
@@ -44,4 +86,15 @@ export const handleConnection = async (
   );
 
   ws.on('close', () => logger.debug('Connection closed'));
+  } catch (err) {
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Internal server error.',
+        key: 'INTERNAL_SERVER_ERROR',
+      }),
+    );
+    ws.close();
+    logger.error(err);
+  }
 };
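The handler now reads everything it needs from the connection URL's query string. An illustrative connect URL (host, port, and model names are placeholders, not values from the diff; the two OpenAI parameters are only sent for `custom_openai`):

```ts
// Illustrative websocket URL for the rewritten handleConnection:
const url =
  'ws://localhost:3001/?chatModelProvider=openai&chatModel=GPT-4%20turbo' +
  '&embeddingModelProvider=openai' +
  '&embeddingModel=Text%20embedding%203%20small' +
  // appended only when chatModelProvider === 'custom_openai':
  '&openAIApiKey=sk-...&openAIBaseURL=https://api.example.com/v1';
```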
Websocket message handler:

@@ -57,7 +57,13 @@ const handleEmitterEvents = (
   });
   emitter.on('error', (data) => {
     const parsedData = JSON.parse(data);
-    ws.send(JSON.stringify({ type: 'error', data: parsedData.data }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: parsedData.data,
+        key: 'CHAIN_ERROR',
+      }),
+    );
   });
 };
 
@@ -73,7 +79,11 @@ export const handleMessage = async (
 
   if (!parsedMessage.content)
     return ws.send(
-      JSON.stringify({ type: 'error', data: 'Invalid message format' }),
+      JSON.stringify({
+        type: 'error',
+        data: 'Invalid message format',
+        key: 'INVALID_FORMAT',
+      }),
     );
 
   const history: BaseMessage[] = parsedMessage.history.map((msg) => {
@@ -99,11 +109,23 @@ export const handleMessage = async (
       );
       handleEmitterEvents(emitter, ws, id);
     } else {
-      ws.send(JSON.stringify({ type: 'error', data: 'Invalid focus mode' }));
+      ws.send(
+        JSON.stringify({
+          type: 'error',
+          data: 'Invalid focus mode',
+          key: 'INVALID_FOCUS_MODE',
+        }),
+      );
     }
   }
   } catch (err) {
-    ws.send(JSON.stringify({ type: 'error', data: 'Invalid message format' }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Invalid message format',
+        key: 'INVALID_FORMAT',
+      }),
+    );
     logger.error(`Failed to handle message: ${err}`);
   }
 };
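Every error sent over the socket now carries a stable `key` next to the human-readable `data`, so the client can branch on it instead of string-matching messages. Collecting the keys from the two websocket files above (a sketch; the diff itself declares no such type):

```ts
// Error frames emitted by the backend after this change.
type WsError = {
  type: 'error';
  data: string;
  key:
    | 'INVALID_MODEL_SELECTED' // client clears localStorage (see ChatWindow below)
    | 'INTERNAL_SERVER_ERROR'
    | 'CHAIN_ERROR'
    | 'INVALID_FORMAT'
    | 'INVALID_FOCUS_MODE';
};
```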
UI root layout:

@@ -3,6 +3,7 @@ import { Montserrat } from 'next/font/google';
 import './globals.css';
 import { cn } from '@/lib/utils';
 import Sidebar from '@/components/Sidebar';
+import { Toaster } from 'sonner';
 
 const montserrat = Montserrat({
   weight: ['300', '400', '500', '700'],
@@ -26,6 +27,15 @@ export default function RootLayout({
     <html className="h-full" lang="en">
       <body className={cn('h-full', montserrat.className)}>
         <Sidebar>{children}</Sidebar>
+        <Toaster
+          toastOptions={{
+            unstyled: true,
+            classNames: {
+              toast:
+                'bg-[#111111] text-white rounded-lg p-4 flex flex-row items-center space-x-2',
+            },
+          }}
+        />
       </body>
     </html>
   );
ChatWindow component:

@@ -5,6 +5,7 @@ import { Document } from '@langchain/core/documents';
 import Navbar from './Navbar';
 import Chat from './Chat';
 import EmptyChat from './EmptyChat';
+import { toast } from 'sonner';
 
 export type Message = {
   id: string;
@@ -22,39 +23,102 @@ const useSocket = (url: string) => {
     const connectWs = async () => {
       let chatModel = localStorage.getItem('chatModel');
       let chatModelProvider = localStorage.getItem('chatModelProvider');
+      let embeddingModel = localStorage.getItem('embeddingModel');
+      let embeddingModelProvider = localStorage.getItem(
+        'embeddingModelProvider',
+      );
 
-      if (!chatModel || !chatModelProvider) {
-        const chatModelProviders = await fetch(
+      if (
+        !chatModel ||
+        !chatModelProvider ||
+        !embeddingModel ||
+        !embeddingModelProvider
+      ) {
+        const providers = await fetch(
           `${process.env.NEXT_PUBLIC_API_URL}/models`,
-        ).then(async (res) => (await res.json())['providers']);
+          {
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          },
+        ).then(async (res) => await res.json());
+
+        const chatModelProviders = providers.chatModelProviders;
+        const embeddingModelProviders = providers.embeddingModelProviders;
 
         if (
           !chatModelProviders ||
           Object.keys(chatModelProviders).length === 0
         )
-          return console.error('No chat models available');
+          return toast.error('No chat models available');
+
+        if (
+          !embeddingModelProviders ||
+          Object.keys(embeddingModelProviders).length === 0
+        )
+          return toast.error('No embedding models available');
 
         chatModelProvider = Object.keys(chatModelProviders)[0];
         chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
 
+        embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+        embeddingModel = Object.keys(
+          embeddingModelProviders[embeddingModelProvider],
+        )[0];
+
         localStorage.setItem('chatModel', chatModel!);
         localStorage.setItem('chatModelProvider', chatModelProvider);
+        localStorage.setItem('embeddingModel', embeddingModel!);
+        localStorage.setItem(
+          'embeddingModelProvider',
+          embeddingModelProvider,
+        );
       }
 
-      const ws = new WebSocket(
-        `${url}?chatModel=${chatModel}&chatModelProvider=${chatModelProvider}`,
-      );
+      const wsURL = new URL(url);
+      const searchParams = new URLSearchParams({});
+
+      searchParams.append('chatModel', chatModel!);
+      searchParams.append('chatModelProvider', chatModelProvider);
+
+      if (chatModelProvider === 'custom_openai') {
+        searchParams.append(
+          'openAIApiKey',
+          localStorage.getItem('openAIApiKey')!,
+        );
+        searchParams.append(
+          'openAIBaseURL',
+          localStorage.getItem('openAIBaseURL')!,
+        );
+      }
+
+      searchParams.append('embeddingModel', embeddingModel!);
+      searchParams.append('embeddingModelProvider', embeddingModelProvider);
+
+      wsURL.search = searchParams.toString();
+
+      const ws = new WebSocket(wsURL.toString());
 
       ws.onopen = () => {
         console.log('[DEBUG] open');
         setWs(ws);
       };
+
+      ws.onmessage = (e) => {
+        const parsedData = JSON.parse(e.data);
+        if (parsedData.type === 'error') {
+          toast.error(parsedData.data);
+          if (parsedData.key === 'INVALID_MODEL_SELECTED') {
+            localStorage.clear();
+          }
+        }
+      };
     };
 
     connectWs();
   }
 
   return () => {
-    1;
     ws?.close();
     console.log('[DEBUG] closed');
   };
@@ -102,6 +166,12 @@ const ChatWindow = () => {
     const messageHandler = (e: MessageEvent) => {
       const data = JSON.parse(e.data);
 
+      if (data.type === 'error') {
+        toast.error(data.data);
+        setLoading(false);
+        return;
+      }
+
      if (data.type === 'sources') {
        sources = data.data;
        if (!added) {
MessageBox component:

@@ -34,15 +34,13 @@ const MessageBox = ({
   const [speechMessage, setSpeechMessage] = useState(message.content);
 
   useEffect(() => {
+    const regex = /\[(\d+)\]/g;
+
     if (
       message.role === 'assistant' &&
       message?.sources &&
       message.sources.length > 0
     ) {
-      const regex = /\[(\d+)\]/g;
-
-      setSpeechMessage(message.content.replace(regex, ''));
-
       return setParsedMessage(
         message.content.replace(
           regex,
@@ -51,6 +49,8 @@ const MessageBox = ({
         ),
       );
     }
+
+    setSpeechMessage(message.content.replace(regex, ''));
     setParsedMessage(message.content);
   }, [message.content, message.sources, message.role]);
 
@@ -95,7 +95,7 @@ const MessageBox = ({
       <Markdown className="prose max-w-none break-words prose-invert prose-p:leading-relaxed prose-pre:p-0 text-white text-sm md:text-base font-medium">
         {parsedMessage}
       </Markdown>
-      {!loading && (
+      {loading && isLast ? null : (
        <div className="flex flex-row items-center justify-between w-full text-white py-4 -mx-2">
          <div className="flex flex-row items-center space-x-1">
            <button className="p-2 text-white/70 rounded-xl hover:bg-[#1c1c1c] transition duration-200 hover:text-white">
SettingsDialog component:

@@ -3,7 +3,10 @@ import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
 import React, { Fragment, useEffect, useState } from 'react';
 
 interface SettingsType {
-  providers: {
+  chatModelProviders: {
+    [key: string]: string[];
+  };
+  embeddingModelProviders: {
     [key: string]: string[];
   };
   openaiApiKey: string;
@@ -25,6 +28,13 @@ const SettingsDialog = ({
   const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
     null,
   );
+  const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
+    useState<string | null>(null);
+  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
+    string | null
+  >(null);
+  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
+  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
   const [isLoading, setIsLoading] = useState(false);
   const [isUpdating, setIsUpdating] = useState(false);
 
@@ -32,9 +42,54 @@ const SettingsDialog = ({
     if (isOpen) {
       const fetchConfig = async () => {
         setIsLoading(true);
-        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`);
-        const data = await res.json();
+        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+          headers: {
+            'Content-Type': 'application/json',
+          },
+        });
+
+        const data = (await res.json()) as SettingsType;
         setConfig(data);
+
+        const chatModelProvidersKeys = Object.keys(
+          data.chatModelProviders || {},
+        );
+        const embeddingModelProvidersKeys = Object.keys(
+          data.embeddingModelProviders || {},
+        );
+
+        const defaultChatModelProvider =
+          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
+        const defaultEmbeddingModelProvider =
+          embeddingModelProvidersKeys.length > 0
+            ? embeddingModelProvidersKeys[0]
+            : '';
+
+        const chatModelProvider =
+          localStorage.getItem('chatModelProvider') ||
+          defaultChatModelProvider ||
+          '';
+        const chatModel =
+          localStorage.getItem('chatModel') ||
+          (data.chatModelProviders &&
+            data.chatModelProviders[chatModelProvider]?.[0]) ||
+          '';
+        const embeddingModelProvider =
+          localStorage.getItem('embeddingModelProvider') ||
+          defaultEmbeddingModelProvider ||
+          '';
+        const embeddingModel =
+          localStorage.getItem('embeddingModel') ||
+          (data.embeddingModelProviders &&
+            data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
+          '';
+
+        setSelectedChatModelProvider(chatModelProvider);
+        setSelectedChatModel(chatModel);
+        setSelectedEmbeddingModelProvider(embeddingModelProvider);
+        setSelectedEmbeddingModel(embeddingModel);
+        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
+        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || '');
         setIsLoading(false);
       };
 
@@ -43,11 +98,6 @@ const SettingsDialog = ({
       // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [isOpen]);
 
-  useEffect(() => {
-    setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
-    setSelectedChatModel(localStorage.getItem('chatModel'));
-  }, []);
-
   const handleSubmit = async () => {
     setIsUpdating(true);
 
@@ -62,6 +112,13 @@ const SettingsDialog = ({
 
       localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
       localStorage.setItem('chatModel', selectedChatModel!);
+      localStorage.setItem(
+        'embeddingModelProvider',
+        selectedEmbeddingModelProvider!,
+      );
+      localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
+      localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
+      localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
     } catch (err) {
       console.log(err);
     } finally {
@@ -107,7 +164,7 @@ const SettingsDialog = ({
           </Dialog.Title>
           {config && !isLoading && (
             <div className="flex flex-col space-y-4 mt-6">
-              {config.providers && (
+              {config.chatModelProviders && (
                 <div className="flex flex-col space-y-1">
                   <p className="text-white/70 text-sm">
                     Chat model Provider
@@ -116,36 +173,47 @@ const SettingsDialog = ({
                     onChange={(e) => {
                       setSelectedChatModelProvider(e.target.value);
                       setSelectedChatModel(
-                        config.providers[e.target.value][0],
+                        config.chatModelProviders[e.target.value][0],
                       );
                     }}
                     className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                   >
-                    {Object.keys(config.providers).map((provider) => (
+                    {Object.keys(config.chatModelProviders).map(
+                      (provider) => (
                         <option
                           key={provider}
                           value={provider}
-                          selected={provider === selectedChatModelProvider}
+                          selected={
+                            provider === selectedChatModelProvider
+                          }
                         >
                           {provider.charAt(0).toUpperCase() +
                             provider.slice(1)}
                         </option>
-                    ))}
+                      ),
+                    )}
                   </select>
                 </div>
               )}
-              {selectedChatModelProvider && (
+              {selectedChatModelProvider &&
+                selectedChatModelProvider != 'custom_openai' && (
                   <div className="flex flex-col space-y-1">
                     <p className="text-white/70 text-sm">Chat Model</p>
                     <select
-                      onChange={(e) => setSelectedChatModel(e.target.value)}
+                      onChange={(e) =>
+                        setSelectedChatModel(e.target.value)
+                      }
                       className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                     >
-                      {config.providers[selectedChatModelProvider] ? (
-                        config.providers[selectedChatModelProvider].length >
-                          0 ? (
-                          config.providers[selectedChatModelProvider].map(
-                            (model) => (
+                      {config.chatModelProviders[
+                        selectedChatModelProvider
+                      ] ? (
+                        config.chatModelProviders[
+                          selectedChatModelProvider
+                        ].length > 0 ? (
+                          config.chatModelProviders[
+                            selectedChatModelProvider
+                          ].map((model) => (
                             <option
                               key={model}
                               value={model}
@@ -153,8 +221,7 @@ const SettingsDialog = ({
                             >
                               {model}
                             </option>
-                          ),
-                        )
+                          ))
                         ) : (
                           <option value="" disabled selected>
                             No models available
@@ -168,6 +235,122 @@ const SettingsDialog = ({
                     </select>
                   </div>
                 )}
+              {selectedChatModelProvider &&
+                selectedChatModelProvider === 'custom_openai' && (
+                  <>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-white/70 text-sm">Model name</p>
+                      <input
+                        type="text"
+                        placeholder="Model name"
+                        defaultValue={selectedChatModel!}
+                        onChange={(e) =>
+                          setSelectedChatModel(e.target.value)
+                        }
+                        className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-white/70 text-sm">
+                        Custom OpenAI API Key
+                      </p>
+                      <input
+                        type="text"
+                        placeholder="Custom OpenAI API Key"
+                        defaultValue={customOpenAIApiKey!}
+                        onChange={(e) =>
+                          setCustomOpenAIApiKey(e.target.value)
+                        }
+                        className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                      />
+                    </div>
+                    <div className="flex flex-col space-y-1">
+                      <p className="text-white/70 text-sm">
+                        Custom OpenAI Base URL
+                      </p>
+                      <input
+                        type="text"
+                        placeholder="Custom OpenAI Base URL"
+                        defaultValue={customOpenAIBaseURL!}
+                        onChange={(e) =>
+                          setCustomOpenAIBaseURL(e.target.value)
+                        }
+                        className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                      />
+                    </div>
+                  </>
+                )}
+              {/* Embedding models */}
+              {config.embeddingModelProviders && (
+                <div className="flex flex-col space-y-1">
+                  <p className="text-white/70 text-sm">
+                    Embedding model Provider
+                  </p>
+                  <select
+                    onChange={(e) => {
+                      setSelectedEmbeddingModelProvider(e.target.value);
+                      setSelectedEmbeddingModel(
+                        config.embeddingModelProviders[e.target.value][0],
+                      );
+                    }}
+                    className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                  >
+                    {Object.keys(config.embeddingModelProviders).map(
+                      (provider) => (
+                        <option
+                          key={provider}
+                          value={provider}
+                          selected={
+                            provider === selectedEmbeddingModelProvider
+                          }
+                        >
+                          {provider.charAt(0).toUpperCase() +
+                            provider.slice(1)}
+                        </option>
+                      ),
+                    )}
+                  </select>
+                </div>
+              )}
+              {selectedEmbeddingModelProvider && (
+                <div className="flex flex-col space-y-1">
+                  <p className="text-white/70 text-sm">Embedding Model</p>
+                  <select
+                    onChange={(e) =>
+                      setSelectedEmbeddingModel(e.target.value)
+                    }
+                    className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                  >
+                    {config.embeddingModelProviders[
+                      selectedEmbeddingModelProvider
+                    ] ? (
+                      config.embeddingModelProviders[
+                        selectedEmbeddingModelProvider
+                      ].length > 0 ? (
+                        config.embeddingModelProviders[
+                          selectedEmbeddingModelProvider
+                        ].map((model) => (
+                          <option
+                            key={model}
+                            value={model}
+                            selected={model === selectedEmbeddingModel}
+                          >
+                            {model}
+                          </option>
+                        ))
+                      ) : (
+                        <option value="" disabled selected>
+                          No embedding models available
+                        </option>
+                      )
+                    ) : (
+                      <option value="" disabled selected>
+                        Invalid provider, please check backend logs
+                      </option>
+                    )}
+                  </select>
+                </div>
+              )}
               <div className="flex flex-col space-y-1">
                 <p className="text-white/70 text-sm">OpenAI API Key</p>
                 <input
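After this change the dialog persists six localStorage keys. One wrinkle is visible in the diff itself: `fetchConfig` reads `openAIBaseUrl` while `handleSubmit` writes `openAIBaseURL`, so a saved custom base URL would not round-trip. A summary sketch, key names taken verbatim from the diff:

```ts
// localStorage keys written on submit in this diff:
const persistedKeys = [
  'chatModelProvider',
  'chatModel',
  'embeddingModelProvider',
  'embeddingModel',
  'openAIApiKey',
  'openAIBaseURL', // but read back as 'openAIBaseUrl' in fetchConfig
] as const;
```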
package.json (frontend):

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.2.0",
+  "version": "1.3.4",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -22,8 +22,9 @@
   "next": "14.1.4",
   "react": "^18",
   "react-dom": "^18",
-  "react-speech-kit": "^3.0.1",
+  "react-text-to-speech": "^0.14.5",
   "react-textarea-autosize": "^8.5.3",
+  "sonner": "^1.4.41",
   "tailwind-merge": "^2.2.2",
   "yet-another-react-lightbox": "^3.17.2",
   "zod": "^3.22.4"
ui/yarn.lock | 13
@@ -2632,10 +2632,10 @@ react-is@^16.13.1:
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
   integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
 
-react-speech-kit@^3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/react-speech-kit/-/react-speech-kit-3.0.1.tgz#8bd936adfe064be1c5a07e2992dfdfd772e80d14"
-  integrity sha512-MXNOciISanhmnxpHJkBOev3M3NPDpW1T7nTc/eGw5pO9cXpoUccRxZkmr/IlpTPbPEneDNeTmbwri/YweyctZg==
+react-text-to-speech@^0.14.5:
+  version "0.14.5"
+  resolved "https://registry.yarnpkg.com/react-text-to-speech/-/react-text-to-speech-0.14.5.tgz#f918786ab283311535682011045bd49777193300"
+  integrity sha512-3brr/IrK/5YTtOZSTo+Y8b+dnWelzfZiDZvkXnOct1e7O7fgA/h9bYAVrtwSRo/VxKfdw+wh6glkj6M0mlQuQQ==
 
 react-textarea-autosize@^8.5.3:
   version "8.5.3"
@@ -2839,6 +2839,11 @@ slash@^3.0.0:
   resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
   integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
 
+sonner@^1.4.41:
+  version "1.4.41"
+  resolved "https://registry.yarnpkg.com/sonner/-/sonner-1.4.41.tgz#ff085ae4f4244713daf294959beaa3e90f842d2c"
+  integrity sha512-uG511ggnnsw6gcn/X+YKkWPo5ep9il9wYi3QJxHsYe7yTZ4+cOd1wuodOUmOpFuXL+/RE3R04LczdNCDygTDgQ==
+
 source-map-js@^1.0.2, source-map-js@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
yarn.lock (backend):

@@ -1291,11 +1291,6 @@ raw-body@2.5.2:
     iconv-lite "0.4.24"
     unpipe "1.0.0"
 
-react-text-to-speech@^0.14.5:
-  version "0.14.5"
-  resolved "https://registry.yarnpkg.com/react-text-to-speech/-/react-text-to-speech-0.14.5.tgz#f918786ab283311535682011045bd49777193300"
-  integrity sha512-3brr/IrK/5YTtOZSTo+Y8b+dnWelzfZiDZvkXnOct1e7O7fgA/h9bYAVrtwSRo/VxKfdw+wh6glkj6M0mlQuQQ==
-
 readable-stream@^3.4.0, readable-stream@^3.6.0:
   version "3.6.2"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"