Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-21 20:48:14 +00:00)

Compare commits: v1.11.0...1da9b7655c (42 commits)
Commits (SHA1): 1da9b7655c, 2edef888a3, 2dc8078848, 8df81c20cf, 34bd02236d, 2430376a0c, bd5628b390, 3d5d04eda0, 07a17925b1, 3bcf646af1, e499c0b96e, 33b736e1e8, 5e1746f646, 41fe009847, 1a8889c71c, 70c1f7230c, c0771095a6, 0856896aff, 3da53aed03, 244675759c, ce6a37aaff, c3abba8462, f709aa8224, 22695f4ef6, 75ef2e0282, b0d97c4c83, 6527388e25, 7397e33f29, f6ffa9ebe0, f9e675823b, 2e736613c5, 295334b195, b106abd77f, 2d80fc400d, 097a5c55c6, d0719429b4, 600d4ceb29, 4f50462f1d, 231bc22a36, 046daf442a, cb1d85e458, ce78b4ff62
.assets/demo.gif (BIN) — binary file not shown (before: 16 MiB, after: 31 MiB)

.assets/sponsers/exa.png (BIN, new file) — binary file not shown (6.5 KiB)

.assets/sponsers/warp.png (BIN, new file) — binary file not shown (433 KiB)
Dockerfile (52 lines changed)

````diff
@@ -17,22 +17,11 @@ RUN yarn build
 FROM node:24.5.0-slim
 
-RUN apt-get update && \
-    apt-get install -y \
-    python3 \
-    python3-pip \
-    python3-venv \
-    python3-dev \
-    sqlite3 \
-    git \
-    build-essential \
-    libxslt-dev \
-    zlib1g-dev \
-    libffi-dev \
-    libssl-dev \
-    uwsgi \
-    uwsgi-plugin-python3 \
-    curl \
-    && rm -rf /var/lib/apt/lists/*
+RUN apt-get update && apt-get install -y \
+    python3-dev python3-babel python3-venv python-is-python3 \
+    uwsgi uwsgi-plugin-python3 \
+    git build-essential libxslt-dev zlib1g-dev libffi-dev libssl-dev \
+    curl sudo \
+    && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /home/perplexica
@@ -45,28 +34,39 @@ COPY drizzle ./drizzle
 RUN mkdir /home/perplexica/uploads
 
-RUN useradd --system --home-dir /usr/local/searxng --shell /bin/sh searxng
-
-WORKDIR /usr/local/searxng
-RUN git clone https://github.com/searxng/searxng.git . && \
-    python3 -m venv venv && \
-    . venv/bin/activate && \
-    pip install --upgrade pip setuptools wheel pyyaml && \
-    pip install -r requirements.txt && \
-    pip install uwsgi
+RUN useradd --shell /bin/bash --system \
+    --home-dir "/usr/local/searxng" \
+    --comment 'Privacy-respecting metasearch engine' \
+    searxng
 
+RUN mkdir "/usr/local/searxng"
 RUN mkdir -p /etc/searxng
+RUN chown -R "searxng:searxng" "/usr/local/searxng"
 
 COPY searxng/settings.yml /etc/searxng/settings.yml
 COPY searxng/limiter.toml /etc/searxng/limiter.toml
 COPY searxng/uwsgi.ini /etc/searxng/uwsgi.ini
+RUN chown -R searxng:searxng /etc/searxng
 
-RUN chown -R searxng:searxng /usr/local/searxng /etc/searxng
+USER searxng
+
+RUN git clone "https://github.com/searxng/searxng" \
+    "/usr/local/searxng/searxng-src"
+
+RUN python3 -m venv "/usr/local/searxng/searx-pyenv"
+RUN "/usr/local/searxng/searx-pyenv/bin/pip" install --upgrade pip setuptools wheel pyyaml msgspec
+RUN cd "/usr/local/searxng/searxng-src" && \
+    "/usr/local/searxng/searx-pyenv/bin/pip" install --use-pep517 --no-build-isolation -e .
+
+USER root
 
 WORKDIR /home/perplexica
 COPY entrypoint.sh ./entrypoint.sh
 RUN chmod +x ./entrypoint.sh
 RUN sed -i 's/\r$//' ./entrypoint.sh || true
 
+RUN echo "searxng ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
+
 EXPOSE 3000 8080
 
 ENV SEARXNG_API_URL=http://localhost:8080
````
README.md (130 lines changed)

````diff
@@ -1,74 +1,76 @@
-# 🚀 Perplexica - An AI-powered search engine 🔎 <!-- omit in toc -->
+# Perplexica 🔍
 
-<div align="center" markdown="1">
-<sup>Special thanks to:</sup>
-<br>
-<br>
-<a href="https://www.warp.dev/perplexica">
-<img alt="Warp sponsorship" width="400" src="https://github.com/user-attachments/assets/775dd593-9b5f-40f1-bf48-479faff4c27b">
-</a>
-
-### [Warp, the AI Devtool that lives in your terminal](https://www.warp.dev/perplexica)
-
-[Available for MacOS, Linux, & Windows](https://www.warp.dev/perplexica)
-
-</div>
-
-<hr/>
-
+[](https://github.com/ItzCrazyKns/Perplexica/stargazers)
+[](https://github.com/ItzCrazyKns/Perplexica/network/members)
+[](https://github.com/ItzCrazyKns/Perplexica/watchers)
+[](https://hub.docker.com/r/itzcrazykns1337/perplexica)
+[](https://github.com/ItzCrazyKns/Perplexica/blob/master/LICENSE)
+[](https://github.com/ItzCrazyKns/Perplexica/commits/master)
 [](https://discord.gg/26aArMy8tT)
 
-
+Perplexica is a **privacy-focused AI answering engine** that runs entirely on your own hardware. It combines knowledge from the vast internet with support for **local LLMs** (Ollama) and cloud providers (OpenAI, Claude, Groq), delivering accurate answers with **cited sources** while keeping your searches completely private.
 
-## Table of Contents <!-- omit in toc -->
+
 
-- [Overview](#overview)
-- [Preview](#preview)
-- [Features](#features)
-- [Installation](#installation)
-  - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
-  - [Non-Docker Installation](#non-docker-installation)
-  - [Ollama Connection Errors](#ollama-connection-errors)
-  - [Lemonade Connection Errors](#lemonade-connection-errors)
-- [Using as a Search Engine](#using-as-a-search-engine)
-- [Using Perplexica's API](#using-perplexicas-api)
-- [Expose Perplexica to a network](#expose-perplexica-to-network)
-- [One-Click Deployment](#one-click-deployment)
-- [Upcoming Features](#upcoming-features)
-- [Support Us](#support-us)
-  - [Donations](#donations)
-- [Contribution](#contribution)
-- [Help and Support](#help-and-support)
-
-## Overview
-
-Perplexica is an open-source AI-powered searching tool or an AI-powered search engine that goes deep into the internet to find answers. Inspired by Perplexity AI, it's an open-source option that not just searches the web but understands your questions. It uses advanced machine learning algorithms like similarity searching and embeddings to refine results and provides clear answers with sources cited.
-
-Using SearxNG to stay current and fully open source, Perplexica ensures you always get the most up-to-date information without compromising your privacy.
-
 Want to know more about its architecture and how it works? You can read it [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/README.md).
 
-## Preview
+## ✨ Features
 
-
+🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, and more. Mix and match models based on your needs.
 
-## Features
+⚡ **Smart search modes** - Choose Balanced Mode for everyday searches, Fast Mode when you need quick answers, or wait for Quality Mode (coming soon) for deep research.
 
-- **Local LLMs**: You can utilize local LLMs such as Qwen, DeepSeek, Llama, and Mistral.
-- **Two Main Modes:**
-  - **Copilot Mode:** (In development) Boosts search by generating different queries to find more relevant internet sources. Like normal search instead of just using the context by SearxNG, it visits the top matches and tries to find relevant sources to the user's query directly from the page.
-  - **Normal Mode:** Processes your query and performs a web search.
-- **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
-  - **All Mode:** Searches the entire web to find the best results.
-  - **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
-  - **Academic Search Mode:** Finds articles and papers, ideal for academic research.
-  - **YouTube Search Mode:** Finds YouTube videos based on the search query.
-  - **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.
-  - **Reddit Search Mode:** Searches Reddit for discussions and opinions related to the query.
-- **Current Information:** Some search tools might give you outdated info because they use data from crawling bots and convert them into embeddings and store them in a index. Unlike them, Perplexica uses SearxNG, a metasearch engine to get the results and rerank and get the most relevant source out of it, ensuring you always get the latest information without the overhead of daily data updates.
-- **API**: Integrate Perplexica into your existing applications and make use of its capibilities.
-
-It has many more features like image and video search. Some of the planned features are mentioned in [upcoming features](#upcoming-features).
+🎯 **Six specialized focus modes** - Get better results with modes designed for specific tasks: Academic papers, YouTube videos, Reddit discussions, Wolfram Alpha calculations, writing assistance, or general web search.
+
+🔍 **Web search powered by SearxNG** - Access multiple search engines while keeping your identity private. Support for Tavily and Exa coming soon for even better results.
+
+📷 **Image and video search** - Find visual content alongside text results. Search isn't limited to just articles anymore.
+
+📄 **File uploads** - Upload documents and ask questions about them. PDFs, text files, images - Perplexica understands them all.
+
+🌐 **Search specific domains** - Limit your search to specific websites when you know where to look. Perfect for technical documentation or research papers.
+
+💡 **Smart suggestions** - Get intelligent search suggestions as you type, helping you formulate better queries.
+
+📚 **Discover** - Browse interesting articles and trending content throughout the day. Stay informed without even searching.
+
+🕒 **Search history** - Every search is saved locally so you can revisit your discoveries anytime. Your research is never lost.
+
+✨ **More coming soon** - We're actively developing new features based on community feedback. Join our Discord to help shape Perplexica's future!
+
+## Sponsors
+
+Perplexica's development is powered by the generous support of our sponsors. Their contributions help keep this project free, open-source, and accessible to everyone.
+
+<div align="center">
+
+<a href="https://www.warp.dev/perplexica">
+<img alt="Warp Terminal" src=".assets/sponsers/warp.png" width="100%">
+</a>
+
+### **✨ [Try Warp - The AI-Powered Terminal →](https://www.warp.dev/perplexica)**
+
+Warp is revolutionizing development workflows with AI-powered features, modern UX, and blazing-fast performance. Used by developers at top companies worldwide.
+
+</div>
+
+---
+
+We'd also like to thank the following partners for their generous support:
+
+<table>
+<tr>
+<td>
+<a href="https://dashboard.exa.ai" target="_blank">
+<img src=".assets/sponsers/exa.png" alt="Exa" style="max-width: 8rem; max-height: 8rem; border-radius: .75rem;" />
+</a>
+</td>
+<td>
+<a href="https://dashboard.exa.ai">Exa</a> • The Perfect Web Search API for LLMs - web search, crawling, deep research, and answer APIs
+</td>
+</tr>
+</table>
 
 ## Installation
@@ -79,19 +81,19 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 Perplexica can be easily run using Docker. Simply run the following command:
 
 ```bash
-docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
 ```
 
 This will pull and start the Perplexica container with the bundled SearxNG search engine. Once running, open your browser and navigate to http://localhost:3000. You can then configure your settings (API keys, models, etc.) directly in the setup screen.
 
-**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required.
+**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required. The `-v` flags create persistent volumes for your data and uploaded files.
 
 #### Using Perplexica with Your Own SearxNG Instance
 
 If you already have SearxNG running, you can use the slim version of Perplexica:
 
 ```bash
-docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
+docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
 ```
 
 **Important**: Make sure your SearxNG instance has:
@@ -118,7 +120,7 @@ If you prefer to build from source or need more control:
 
 ```bash
 docker build -t perplexica .
-docker run -p 3000:3000 --name perplexica perplexica
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica perplexica
 ```
 
 5. Access Perplexica at http://localhost:3000 and configure your settings in the setup screen.
````
docker-compose.yaml (new file, 15 lines)

```yaml
services:
  perplexica:
    image: itzcrazykns1337/perplexica:latest
    ports:
      - '3000:3000'
    volumes:
      - data:/home/perplexica/data
      - uploads:/home/perplexica/uploads
    restart: unless-stopped

volumes:
  data:
    name: 'perplexica-data'
  uploads:
    name: 'perplexica-uploads'
```
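With this file in the project root, the standard Docker Compose workflow applies. A minimal sketch (assumes Docker Compose v2; these commands are not part of the changed files):

```bash
# Start Perplexica in the background; the named volumes
# perplexica-data and perplexica-uploads are created on first run
docker compose up -d

# Follow logs, then stop the stack (the volumes persist across restarts)
docker compose logs -f perplexica
docker compose down
```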
````diff
@@ -17,6 +17,7 @@ Before making search requests, you'll need to get the available providers and th
 Returns a list of all active providers with their available chat and embedding models.
 
 **Response Example:**
 
 ```json
 {
   "providers": [
````
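For reference, fetching that provider list before issuing a search request looks roughly like the sketch below. The exact route is not visible in this hunk, so the `/api/providers` path is an assumption to verify against the full API docs:

```bash
# List active providers with their chat and embedding models
# (endpoint path assumed, not confirmed by the hunk above)
curl -s http://localhost:3000/api/providers
```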
````diff
@@ -10,7 +10,7 @@ Simply pull the latest image and restart your container:
 docker pull itzcrazykns1337/perplexica:latest
 docker stop perplexica
 docker rm perplexica
-docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
 ```
 
 For slim version:
@@ -19,7 +19,7 @@ For slim version:
 docker pull itzcrazykns1337/perplexica:slim-latest
 docker stop perplexica
 docker rm perplexica
-docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
+docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
 ```
 
 Once updated, go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.
````
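If you run Perplexica from the docker-compose.yaml added in this comparison rather than a plain `docker run`, the equivalent update is the usual Compose flow (an assumption about your setup; this is not part of the changed docs):

```bash
docker compose pull && docker compose up -d
```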
entrypoint.sh

````diff
@@ -1,24 +1,32 @@
 #!/bin/sh
 set -e
 
-cd /usr/local/searxng
-export SEARXNG_SETTINGS_PATH=/etc/searxng/settings.yml
-
-# Start SearXNG in background with all output redirected to /dev/null
-/usr/local/searxng/venv/bin/uwsgi \
-    --http-socket 0.0.0.0:8080 \
-    --ini /etc/searxng/uwsgi.ini \
-    --virtualenv /usr/local/searxng/venv \
-    --disable-logging > /dev/null 2>&1 &
-
 echo "Starting SearXNG..."
 
+sudo -H -u searxng bash -c "cd /usr/local/searxng/searxng-src && export SEARXNG_SETTINGS_PATH='/etc/searxng/settings.yml' && export FLASK_APP=searx/webapp.py && /usr/local/searxng/searx-pyenv/bin/python -m flask run --host=0.0.0.0 --port=8080" &
+SEARXNG_PID=$!
+
+echo "Waiting for SearXNG to be ready..."
 sleep 5
 
+COUNTER=0
+MAX_TRIES=30
 until curl -s http://localhost:8080 > /dev/null 2>&1; do
+  COUNTER=$((COUNTER+1))
+  if [ $COUNTER -ge $MAX_TRIES ]; then
+    echo "Warning: SearXNG health check timeout, but continuing..."
+    break
+  fi
   sleep 1
 done
-echo "SearXNG started successfully"
+
+if curl -s http://localhost:8080 > /dev/null 2>&1; then
+  echo "SearXNG started successfully (PID: $SEARXNG_PID)"
+else
+  echo "SearXNG may not be fully ready, but continuing (PID: $SEARXNG_PID)"
+fi
 
 cd /home/perplexica
 echo "Starting Perplexica..."
 
 exec node server.js
````
package.json (21 lines changed)

````diff
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.11.0",
+  "version": "1.11.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
@@ -16,13 +16,14 @@
     "@huggingface/transformers": "^3.7.5",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
-    "@langchain/anthropic": "^1.0.0",
-    "@langchain/community": "^1.0.0",
-    "@langchain/core": "^1.0.1",
-    "@langchain/google-genai": "^1.0.0",
-    "@langchain/groq": "^1.0.0",
-    "@langchain/ollama": "^1.0.0",
-    "@langchain/openai": "^1.0.0",
+    "@langchain/anthropic": "^1.0.1",
+    "@langchain/community": "^1.0.3",
+    "@langchain/core": "^1.0.5",
+    "@langchain/google-genai": "^1.0.1",
+    "@langchain/groq": "^1.0.1",
+    "@langchain/langgraph": "^1.0.1",
+    "@langchain/ollama": "^1.0.1",
+    "@langchain/openai": "^1.1.1",
     "@langchain/textsplitters": "^1.0.0",
     "@tailwindcss/typography": "^0.5.12",
     "axios": "^1.8.3",
@@ -33,7 +34,7 @@
     "framer-motion": "^12.23.24",
     "html-to-text": "^9.0.5",
     "jspdf": "^3.0.1",
-    "langchain": "^1.0.1",
+    "langchain": "^1.0.4",
     "lucide-react": "^0.363.0",
     "mammoth": "^1.9.1",
     "markdown-to-jsx": "^7.7.2",
@@ -48,7 +49,7 @@
     "tailwind-merge": "^2.2.2",
     "winston": "^3.17.0",
     "yet-another-react-lightbox": "^3.17.2",
-    "zod": "^3.22.4"
+    "zod": "^4.1.12"
   },
   "devDependencies": {
     "@types/better-sqlite3": "^7.6.12",
````
````diff
@@ -1,4 +1,4 @@
-import handleImageSearch from '@/lib/chains/imageSearchAgent';
+import searchImages from '@/lib/agents/media/image';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
@@ -13,6 +13,13 @@ export const POST = async (req: Request) => {
   try {
     const body: ImageSearchBody = await req.json();
 
+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +30,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
-    const images = await handleImageSearch(
+    const images = await searchImages(
       {
-        chat_history: chatHistory,
+        chatHistory: chatHistory,
         query: body.query,
       },
       llm,
````
````diff
@@ -30,12 +30,6 @@ export const POST = async (req: Request) => {
   body.optimizationMode = body.optimizationMode || 'balanced';
   body.stream = body.stream || false;
 
-  const history: BaseMessage[] = body.history.map((msg) => {
-    return msg[0] === 'human'
-      ? new HumanMessage({ content: msg[1] })
-      : new AIMessage({ content: msg[1] });
-  });
-
   const registry = new ModelRegistry();
 
   const [llm, embeddings] = await Promise.all([
@@ -46,6 +40,12 @@ export const POST = async (req: Request) => {
     ),
   ]);
 
+  const history: BaseMessage[] = body.history.map((msg) => {
+    return msg[0] === 'human'
+      ? new HumanMessage({ content: msg[1] })
+      : new AIMessage({ content: msg[1] });
+  });
+
   const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode];
 
   if (!searchHandler) {
@@ -128,7 +128,7 @@ export const POST = async (req: Request) => {
 
       try {
         controller.close();
-      } catch (error) {}
+      } catch (error) { }
     });
 
     emitter.on('data', (data: string) => {
````
````diff
@@ -1,7 +1,6 @@
-import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
+import generateSuggestions from '@/lib/agents/suggestions';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 
 interface SuggestionsGenerationBody {
@@ -13,6 +12,13 @@ export const POST = async (req: Request) => {
   try {
     const body: SuggestionsGenerationBody = await req.json();
 
+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +29,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
     const suggestions = await generateSuggestions(
       {
-        chat_history: chatHistory,
+        chatHistory,
       },
       llm,
     );
````
````diff
@@ -1,4 +1,4 @@
-import handleVideoSearch from '@/lib/chains/videoSearchAgent';
+import handleVideoSearch from '@/lib/agents/media/video';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
@@ -13,6 +13,13 @@ export const POST = async (req: Request) => {
   try {
     const body: VideoSearchBody = await req.json();
 
+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +30,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
     const videos = await handleVideoSearch(
       {
-        chat_history: chatHistory,
+        chatHistory: chatHistory,
         query: body.query,
       },
       llm,
````
````diff
@@ -1,17 +1,10 @@
 'use client';
 
 import ChatWindow from '@/components/ChatWindow';
-import { useParams } from 'next/navigation';
 import React from 'react';
-import { ChatProvider } from '@/lib/hooks/useChat';
 
 const Page = () => {
-  const { chatId }: { chatId: string } = useParams();
-
-  return (
-    <ChatProvider id={chatId}>
-      <ChatWindow />
-    </ChatProvider>
-  );
+  return <ChatWindow />;
 };
 
 export default Page;
````
````diff
@@ -9,6 +9,7 @@ import { Toaster } from 'sonner';
 import ThemeProvider from '@/components/theme/Provider';
 import configManager from '@/lib/config';
 import SetupWizard from '@/components/Setup/SetupWizard';
+import { ChatProvider } from '@/lib/hooks/useChat';
 
 const montserrat = Montserrat({
   weight: ['300', '400', '500', '700'],
@@ -36,7 +37,7 @@ export default function RootLayout({
       <body className={cn('h-full', montserrat.className)}>
         <ThemeProvider>
           {setupComplete ? (
-            <>
+            <ChatProvider>
               <Sidebar>{children}</Sidebar>
               <Toaster
                 toastOptions={{
@@ -47,7 +48,7 @@
                 },
               }}
             />
-          </>
+            </ChatProvider>
           ) : (
             <SetupWizard configSections={configSections} />
           )}
````
````diff
@@ -1,7 +1,5 @@
 import ChatWindow from '@/components/ChatWindow';
-import { ChatProvider } from '@/lib/hooks/useChat';
 import { Metadata } from 'next';
-import { Suspense } from 'react';
 
 export const metadata: Metadata = {
   title: 'Chat - Perplexica',
@@ -9,15 +7,7 @@ export const metadata: Metadata = {
 };
 
 const Home = () => {
-  return (
-    <div>
-      <Suspense>
-        <ChatProvider>
-          <ChatWindow />
-        </ChatProvider>
-      </Suspense>
-    </div>
-  );
+  return <ChatWindow />;
 };
 
 export default Home;
````
````diff
@@ -9,6 +9,7 @@ import Link from 'next/link';
 import NextError from 'next/error';
 import { useChat } from '@/lib/hooks/useChat';
 import Loader from './ui/Loader';
+import SettingsButtonMobile from './Settings/SettingsButtonMobile';
 
 export interface BaseMessage {
   chatId: string;
@@ -56,9 +57,7 @@ const ChatWindow = () => {
   return (
     <div className="relative">
       <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
-        <Link href="/settings">
-          <Settings className="cursor-pointer lg:hidden" />
-        </Link>
+        <SettingsButtonMobile />
       </div>
       <div className="flex flex-col items-center justify-center min-h-screen">
         <p className="dark:text-white/70 text-black/70 text-sm">
````
````diff
@@ -205,11 +205,11 @@ const MessageBox = ({
           <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
             <SearchImages
               query={section.userMessage.content}
-              chatHistory={chatTurns.slice(0, sectionIndex * 2)}
+              chatHistory={chatTurns}
               messageId={section.assistantMessage.messageId}
             />
             <SearchVideos
-              chatHistory={chatTurns.slice(0, sectionIndex * 2)}
+              chatHistory={chatTurns}
               query={section.userMessage.content}
               messageId={section.assistantMessage.messageId}
             />
````
````diff
@@ -8,17 +8,16 @@ import {
   PopoverPanel,
   Transition,
 } from '@headlessui/react';
-import { Fragment, useEffect, useState } from 'react';
+import { Fragment, useEffect, useMemo, useState } from 'react';
 import { MinimalProvider } from '@/lib/models/types';
+import { useChat } from '@/lib/hooks/useChat';
 
 const ModelSelector = () => {
   const [providers, setProviders] = useState<MinimalProvider[]>([]);
   const [isLoading, setIsLoading] = useState(true);
   const [searchQuery, setSearchQuery] = useState('');
-  const [selectedModel, setSelectedModel] = useState<{
-    providerId: string;
-    modelKey: string;
-  } | null>(null);
+  const { setChatModelProvider, chatModelProvider } = useChat();
 
   useEffect(() => {
     const loadProviders = async () => {
@@ -30,28 +29,8 @@ const ModelSelector = () => {
           throw new Error('Failed to fetch providers');
         }
 
-        const data = await res.json();
-        setProviders(data.providers || []);
-
-        const savedProviderId = localStorage.getItem('chatModelProviderId');
-        const savedModelKey = localStorage.getItem('chatModelKey');
-
-        if (savedProviderId && savedModelKey) {
-          setSelectedModel({
-            providerId: savedProviderId,
-            modelKey: savedModelKey,
-          });
-        } else if (data.providers && data.providers.length > 0) {
-          const firstProvider = data.providers.find(
-            (p: MinimalProvider) => p.chatModels.length > 0,
-          );
-          if (firstProvider && firstProvider.chatModels[0]) {
-            setSelectedModel({
-              providerId: firstProvider.id,
-              modelKey: firstProvider.chatModels[0].key,
-            });
-          }
-        }
+        const data: { providers: MinimalProvider[] } = await res.json();
+        setProviders(data.providers);
       } catch (error) {
         console.error('Error loading providers:', error);
       } finally {
@@ -62,13 +41,32 @@ const ModelSelector = () => {
     loadProviders();
   }, []);
 
+  const orderedProviders = useMemo(() => {
+    if (!chatModelProvider?.providerId) return providers;
+
+    const currentProviderIndex = providers.findIndex(
+      (p) => p.id === chatModelProvider.providerId,
+    );
+
+    if (currentProviderIndex === -1) {
+      return providers;
+    }
+
+    const selectedProvider = providers[currentProviderIndex];
+    const remainingProviders = providers.filter(
+      (_, index) => index !== currentProviderIndex,
+    );
+
+    return [selectedProvider, ...remainingProviders];
+  }, [providers, chatModelProvider]);
+
   const handleModelSelect = (providerId: string, modelKey: string) => {
-    setSelectedModel({ providerId, modelKey });
+    setChatModelProvider({ providerId, key: modelKey });
     localStorage.setItem('chatModelProviderId', providerId);
     localStorage.setItem('chatModelKey', modelKey);
   };
 
-  const filteredProviders = providers
+  const filteredProviders = orderedProviders
     .map((provider) => ({
       ...provider,
       chatModels: provider.chatModels.filter(
@@ -140,15 +138,16 @@ const ModelSelector = () => {
 
                   <div className="flex flex-col px-2 py-2 space-y-0.5">
                     {provider.chatModels.map((model) => (
-                      <PopoverButton
+                      <button
                         key={model.key}
                         onClick={() =>
                           handleModelSelect(provider.id, model.key)
                         }
+                        type="button"
                         className={cn(
                           'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
-                          selectedModel?.providerId === provider.id &&
-                            selectedModel?.modelKey === model.key
+                          chatModelProvider?.providerId === provider.id &&
+                            chatModelProvider?.key === model.key
                             ? 'bg-light-secondary dark:bg-dark-secondary'
                             : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
                         )}
@@ -158,8 +157,9 @@ const ModelSelector = () => {
                           size={15}
                           className={cn(
                             'shrink-0',
-                            selectedModel?.providerId === provider.id &&
-                              selectedModel?.modelKey === model.key
+                            chatModelProvider?.providerId ===
+                              provider.id &&
+                              chatModelProvider?.key === model.key
                               ? 'text-sky-500'
                               : 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
                           )}
@@ -167,8 +167,9 @@ const ModelSelector = () => {
                           <p
                             className={cn(
                               'text-sm truncate',
-                              selectedModel?.providerId === provider.id &&
-                                selectedModel?.modelKey === model.key
+                              chatModelProvider?.providerId ===
+                                provider.id &&
+                                chatModelProvider?.key === model.key
                                 ? 'text-sky-500 font-medium'
                                 : 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
                             )}
@@ -176,7 +177,7 @@ const ModelSelector = () => {
                             {model.name}
                           </p>
                         </div>
-                      </PopoverButton>
+                      </button>
                     ))}
                   </div>
 
````
````diff
@@ -97,7 +97,7 @@ const AddModel = ({
         >
           <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
             <div className="px-6 pt-6 pb-4">
-              <h3 className="text-black/90 dark:text-white/90 font-medium">
+              <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
                 Add new {type === 'chat' ? 'chat' : 'embedding'} model
               </h3>
             </div>
@@ -115,7 +115,7 @@ const AddModel = ({
               <input
                 value={modelName}
                 onChange={(e) => setModelName(e.target.value)}
-                className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+                className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
                 placeholder="e.g., GPT-4"
                 type="text"
                 required
@@ -128,7 +128,7 @@ const AddModel = ({
               <input
                 value={modelKey}
                 onChange={(e) => setModelKey(e.target.value)}
-                className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+                className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
                 placeholder="e.g., gpt-4"
                 type="text"
                 required
@@ -140,7 +140,7 @@ const AddModel = ({
               <button
                 type="submit"
                 disabled={loading}
-                className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
+                className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
               >
                 {loading ? (
                   <Loader2 className="animate-spin" size={16} />
````
````diff
@@ -82,10 +82,10 @@ const AddProvider = ({
 
       setProviders((prev) => [...prev, data]);
 
-      toast.success('Provider added successfully.');
+      toast.success('Connection added successfully.');
     } catch (error) {
       console.error('Error adding provider:', error);
-      toast.error('Failed to add provider.');
+      toast.error('Failed to add connection.');
     } finally {
       setLoading(false);
       setOpen(false);
@@ -96,10 +96,10 @@ const AddProvider = ({
     <>
       <button
         onClick={() => setOpen(true)}
-        className="px-3 md:px-4 py-1.5 md:py-2 rounded-lg text-xs sm:text-sm border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
+        className="px-3 md:px-4 py-1.5 md:py-2 rounded-lg text-xs sm:text-xs border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
       >
         <Plus className="w-3.5 h-3.5 md:w-4 md:h-4" />
-        <span>Add Provider</span>
+        <span>Add Connection</span>
       </button>
       <AnimatePresence>
         {open && (
@@ -119,8 +119,8 @@ const AddProvider = ({
           <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
             <form onSubmit={handleSubmit} className="flex flex-col flex-1">
               <div className="px-6 pt-6 pb-4">
-                <h3 className="text-black/90 dark:text-white/90 font-medium">
-                  Add new provider
+                <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
+                  Add new connection
                 </h3>
               </div>
               <div className="border-t border-light-200 dark:border-dark-200" />
@@ -128,7 +128,7 @@ const AddProvider = ({
               <div className="flex flex-col space-y-4">
                 <div className="flex flex-col items-start space-y-2">
                   <label className="text-xs text-black/70 dark:text-white/70">
-                    Select provider type
+                    Select connection type
                   </label>
                   <Select
                     value={selectedProvider ?? ''}
@@ -149,13 +149,13 @@ const AddProvider = ({
                     className="flex flex-col items-start space-y-2"
                   >
                     <label className="text-xs text-black/70 dark:text-white/70">
-                      Name*
+                      Connection Name*
                     </label>
                     <input
                       value={name}
                       onChange={(e) => setName(e.target.value)}
                       className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
-                      placeholder={'Provider Name'}
+                      placeholder={'e.g., My OpenAI Connection'}
                       type="text"
                       required={true}
                     />
@@ -178,7 +178,7 @@ const AddProvider = ({
                           [field.key]: event.target.value,
                         }))
                       }
-                      className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+                      className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
                       placeholder={
                         (field as StringUIConfigField).placeholder
                       }
@@ -194,12 +194,12 @@ const AddProvider = ({
                 <button
                   type="submit"
                   disabled={loading}
-                  className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
+                  className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
                 >
                   {loading ? (
                     <Loader2 className="animate-spin" size={16} />
                   ) : (
-                    'Add Provider'
+                    'Add Connection'
                   )}
                 </button>
               </div>
````
````diff
@@ -34,10 +34,10 @@ const DeleteProvider = ({
         return prev.filter((p) => p.id !== modelProvider.id);
       });
 
-      toast.success('Provider deleted successfully.');
+      toast.success('Connection deleted successfully.');
     } catch (error) {
       console.error('Error deleting provider:', error);
-      toast.error('Failed to delete provider.');
+      toast.error('Failed to delete connection.');
     } finally {
       setLoading(false);
     }
@@ -51,7 +51,7 @@ const DeleteProvider = ({
           setOpen(true);
         }}
         className="group p-1.5 rounded-md hover:bg-light-200 hover:dark:bg-dark-200 transition-colors group"
-        title="Delete provider"
+        title="Delete connection"
       >
         <Trash2
           size={14}
@@ -76,14 +76,15 @@ const DeleteProvider = ({
           <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
             <div className="px-6 pt-6 pb-4">
               <h3 className="text-black/90 dark:text-white/90 font-medium">
-                Delete provider
+                Delete connection
               </h3>
             </div>
             <div className="border-t border-light-200 dark:border-dark-200" />
             <div className="flex-1 overflow-y-auto px-6 py-4">
-              <p className="text-SM text-black/60 dark:text-white/60">
-                Are you sure you want to delete the provider "
+              <p className="text-sm text-black/60 dark:text-white/60">
+                Are you sure you want to delete the connection "
                 {modelProvider.name}"? This action cannot be undone.
+                All associated models will also be removed.
               </p>
             </div>
             <div className="px-6 py-6 flex justify-end space-x-2">
````
@@ -1,7 +1,7 @@
 import { UIConfigField, ConfigModelProvider } from '@/lib/config/types';
 import { cn } from '@/lib/utils';
 import { AnimatePresence, motion } from 'framer-motion';
-import { AlertCircle, ChevronDown, Pencil, Trash2, X } from 'lucide-react';
+import { AlertCircle, Plug2, Plus, Pencil, Trash2, X } from 'lucide-react';
 import { useState } from 'react';
 import { toast } from 'sonner';
 import AddModel from './AddModelDialog';
@@ -17,7 +17,7 @@ const ModelProvider = ({
   fields: UIConfigField[];
   setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
 }) => {
-  const [open, setOpen] = useState(false);
+  const [open, setOpen] = useState(true);

   const handleModelDelete = async (
     type: 'chat' | 'embedding',
@@ -66,150 +66,157 @@ const ModelProvider = ({
     }
   };

+  const modelCount =
+    modelProvider.chatModels.filter((m) => m.key !== 'error').length +
+    modelProvider.embeddingModels.filter((m) => m.key !== 'error').length;
+  const hasError =
+    modelProvider.chatModels.some((m) => m.key === 'error') ||
+    modelProvider.embeddingModels.some((m) => m.key === 'error');
+
   return (
     <div
       key={modelProvider.id}
-      className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden"
+      className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden bg-light-primary dark:bg-dark-primary"
     >
-      <div
-        className={cn(
-          'group px-5 py-4 flex flex-row justify-between w-full cursor-pointer hover:bg-light-secondary hover:dark:bg-dark-secondary transition duration-200 items-center',
-          !open && 'rounded-lg',
-        )}
-        onClick={() => setOpen(!open)}
-      >
-        <p className="text-sm lg:text-base text-black dark:text-white font-medium">
-          {modelProvider.name}
-        </p>
-        <div className="flex items-center gap-4">
-          <div className="flex flex-row items-center">
-            <UpdateProvider
-              fields={fields}
-              modelProvider={modelProvider}
-              setProviders={setProviders}
-            />
-            <DeleteProvider
-              modelProvider={modelProvider}
-              setProviders={setProviders}
-            />
+      <div className="px-5 py-3.5 flex flex-row justify-between w-full items-center border-b border-light-200 dark:border-dark-200 bg-light-secondary/30 dark:bg-dark-secondary/30">
+        <div className="flex items-center gap-2.5">
+          <div className="p-1.5 rounded-md bg-sky-500/10 dark:bg-sky-500/10">
+            <Plug2 size={14} className="text-sky-500" />
           </div>
-        <ChevronDown
-          size={16}
-          className={cn(
-            open ? 'rotate-180' : '',
-            'transition duration-200 text-black/70 dark:text-white/70 group-hover:text-sky-500',
+          <div className="flex flex-col">
+            <p className="text-sm lg:text-sm text-black dark:text-white font-medium">
+              {modelProvider.name}
+            </p>
+            {modelCount > 0 && (
+              <p className="text-[10px] lg:text-[11px] text-black/50 dark:text-white/50">
+                {modelCount} model{modelCount !== 1 ? 's' : ''} configured
+              </p>
            )}
+          </div>
+        </div>
+        <div className="flex flex-row items-center gap-1">
+          <UpdateProvider
+            fields={fields}
+            modelProvider={modelProvider}
+            setProviders={setProviders}
+          />
+          <DeleteProvider
+            modelProvider={modelProvider}
+            setProviders={setProviders}
          />
        </div>
      </div>
-      <AnimatePresence>
-        {open && (
-          <motion.div
-            initial={{ height: 0, opacity: 0 }}
-            animate={{ height: 'auto', opacity: 1 }}
-            exit={{ height: 0, opacity: 0 }}
-            transition={{ duration: 0.1 }}
-          >
-            <div className="border-t border-light-200 dark:border-dark-200" />
-            <div className="flex flex-col gap-y-4 px-5 py-4">
-              {modelProvider.chatModels.length > 0 && (
-                <div className="flex flex-col gap-y-2">
-                  <div className="flex flex-row w-full justify-between items-center">
-                    <p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
-                      Chat models
-                    </p>
-                    <AddModel
-                      providerId={modelProvider.id}
-                      setProviders={setProviders}
-                      type="chat"
-                    />
+      <div className="flex flex-col gap-y-4 px-5 py-4">
+        <div className="flex flex-col gap-y-2">
+          <div className="flex flex-row w-full justify-between items-center">
+            <p className="text-[11px] lg:text-[11px] font-medium text-black/70 dark:text-white/70 uppercase tracking-wide">
+              Chat Models
+            </p>
+            {!modelProvider.chatModels.some((m) => m.key === 'error') && (
+              <AddModel
+                providerId={modelProvider.id}
+                setProviders={setProviders}
+                type="chat"
+              />
+            )}
+          </div>
+          <div className="flex flex-col gap-2">
+            {modelProvider.chatModels.some((m) => m.key === 'error') ? (
+              <div className="flex flex-row items-center gap-2 text-xs lg:text-xs text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
+                <AlertCircle size={16} className="shrink-0" />
+                <span className="break-words">
+                  {
+                    modelProvider.chatModels.find((m) => m.key === 'error')
+                      ?.name
+                  }
+                </span>
+              </div>
+            ) : modelProvider.chatModels.filter((m) => m.key !== 'error')
+                .length === 0 && !hasError ? (
+              <div className="flex flex-col items-center justify-center py-4 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/20 dark:bg-dark-secondary/20">
+                <p className="text-xs text-black/50 dark:text-white/50 text-center">
+                  No chat models configured
+                </p>
+              </div>
+            ) : modelProvider.chatModels.filter((m) => m.key !== 'error')
+                .length > 0 ? (
+              <div className="flex flex-row flex-wrap gap-2">
+                {modelProvider.chatModels.map((model, index) => (
+                  <div
+                    key={`${modelProvider.id}-chat-${model.key}-${index}`}
+                    className="flex flex-row items-center space-x-1.5 text-xs lg:text-xs text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5 border border-light-200 dark:border-dark-200"
+                  >
+                    <span>{model.name}</span>
+                    <button
+                      onClick={() => {
+                        handleModelDelete('chat', model.key);
+                      }}
+                      className="hover:text-red-500 dark:hover:text-red-400 transition-colors"
+                    >
+                      <X size={12} />
+                    </button>
                  </div>
-                <div className="flex flex-col gap-2">
-                  {modelProvider.chatModels.some((m) => m.key === 'error') ? (
-                    <div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
-                      <AlertCircle size={16} className="shrink-0" />
-                      <span className="break-words">
-                        {
-                          modelProvider.chatModels.find(
-                            (m) => m.key === 'error',
-                          )?.name
-                        }
-                      </span>
-                    </div>
-                  ) : (
-                    <div className="flex flex-row flex-wrap gap-2">
-                      {modelProvider.chatModels.map((model, index) => (
-                        <div
-                          key={`${modelProvider.id}-chat-${model.key}-${index}`}
-                          className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
-                        >
-                          <span>{model.name}</span>
-                          <button
-                            onClick={() => {
-                              handleModelDelete('chat', model.key);
-                            }}
-                          >
-                            <X size={12} />
-                          </button>
-                        </div>
-                      ))}
+                ))}
+              </div>
+            ) : null}
+          </div>
+        </div>
+        <div className="flex flex-col gap-y-2">
+          <div className="flex flex-row w-full justify-between items-center">
+            <p className="text-[11px] lg:text-[11px] font-medium text-black/70 dark:text-white/70 uppercase tracking-wide">
+              Embedding Models
+            </p>
+            {!modelProvider.embeddingModels.some((m) => m.key === 'error') && (
+              <AddModel
+                providerId={modelProvider.id}
+                setProviders={setProviders}
+                type="embedding"
+              />
+            )}
+          </div>
+          <div className="flex flex-col gap-2">
+            {modelProvider.embeddingModels.some((m) => m.key === 'error') ? (
+              <div className="flex flex-row items-center gap-2 text-xs lg:text-xs text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
+                <AlertCircle size={16} className="shrink-0" />
+                <span className="break-words">
+                  {
+                    modelProvider.embeddingModels.find((m) => m.key === 'error')
+                      ?.name
+                  }
+                </span>
                    </div>
-                  )}
+            ) : modelProvider.embeddingModels.filter((m) => m.key !== 'error')
+                .length === 0 && !hasError ? (
+              <div className="flex flex-col items-center justify-center py-4 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/20 dark:bg-dark-secondary/20">
+                <p className="text-xs text-black/50 dark:text-white/50 text-center">
+                  No embedding models configured
+                </p>
+              </div>
+            ) : modelProvider.embeddingModels.filter((m) => m.key !== 'error')
+                .length > 0 ? (
+              <div className="flex flex-row flex-wrap gap-2">
+                {modelProvider.embeddingModels.map((model, index) => (
+                  <div
+                    key={`${modelProvider.id}-embedding-${model.key}-${index}`}
+                    className="flex flex-row items-center space-x-1.5 text-xs lg:text-xs text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5 border border-light-200 dark:border-dark-200"
+                  >
+                    <span>{model.name}</span>
+                    <button
+                      onClick={() => {
+                        handleModelDelete('embedding', model.key);
+                      }}
+                      className="hover:text-red-500 dark:hover:text-red-400 transition-colors"
+                    >
+                      <X size={12} />
+                    </button>
                  </div>
-                </div>
-              )}
-              {modelProvider.embeddingModels.length > 0 && (
-                <div className="flex flex-col gap-y-2">
-                  <div className="flex flex-row w-full justify-between items-center">
-                    <p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
-                      Embedding models
-                    </p>
-                    <AddModel
-                      providerId={modelProvider.id}
-                      setProviders={setProviders}
-                      type="embedding"
-                    />
-                  </div>
-                  <div className="flex flex-col gap-2">
-                    {modelProvider.embeddingModels.some(
-                      (m) => m.key === 'error',
-                    ) ? (
-                      <div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
-                        <AlertCircle size={16} className="shrink-0" />
-                        <span className="break-words">
-                          {
-                            modelProvider.embeddingModels.find(
-                              (m) => m.key === 'error',
-                            )?.name
-                          }
-                        </span>
-                      </div>
-                    ) : (
-                      <div className="flex flex-row flex-wrap gap-2">
-                        {modelProvider.embeddingModels.map((model, index) => (
-                          <div
-                            key={`${modelProvider.id}-embedding-${model.key}-${index}`}
-                            className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
-                          >
-                            <span>{model.name}</span>
-                            <button
-                              onClick={() => {
-                                handleModelDelete('embedding', model.key);
-                              }}
-                            >
-                              <X size={12} />
-                            </button>
-                          </div>
-                        ))}
-                      </div>
-                    )}
-                  </div>
-                </div>
-              )}
-            </div>
-          </motion.div>
-        )}
-      </AnimatePresence>
+                ))}
+              </div>
+            ) : null}
+          </div>
+        </div>
+      </div>
    </div>
  );
 };
@@ -1,5 +1,6 @@
 import Select from '@/components/ui/Select';
 import { ConfigModelProvider } from '@/lib/config/types';
+import { useChat } from '@/lib/hooks/useChat';
 import { useState } from 'react';
 import { toast } from 'sonner';

@@ -16,6 +17,7 @@ const ModelSelect = ({
       : `${localStorage.getItem('embeddingModelProviderId')}/${localStorage.getItem('embeddingModelKey')}`,
   );
   const [loading, setLoading] = useState(false);
+  const { setChatModelProvider, setEmbeddingModelProvider } = useChat();

   const handleSave = async (newValue: string) => {
     setLoading(true);
@@ -23,20 +25,27 @@ const ModelSelect = ({

     try {
       if (type === 'chat') {
-        localStorage.setItem('chatModelProviderId', newValue.split('/')[0]);
-        localStorage.setItem(
-          'chatModelKey',
-          newValue.split('/').slice(1).join('/'),
-        );
+        const providerId = newValue.split('/')[0];
+        const modelKey = newValue.split('/').slice(1).join('/');
+
+        localStorage.setItem('chatModelProviderId', providerId);
+        localStorage.setItem('chatModelKey', modelKey);
+
+        setChatModelProvider({
+          providerId: providerId,
+          key: modelKey,
+        });
       } else {
-        localStorage.setItem(
-          'embeddingModelProviderId',
-          newValue.split('/')[0],
-        );
-        localStorage.setItem(
-          'embeddingModelKey',
-          newValue.split('/').slice(1).join('/'),
-        );
+        const providerId = newValue.split('/')[0];
+        const modelKey = newValue.split('/').slice(1).join('/');
+
+        localStorage.setItem('embeddingModelProviderId', providerId);
+        localStorage.setItem('embeddingModelKey', modelKey);
+
+        setEmbeddingModelProvider({
+          providerId: providerId,
+          key: modelKey,
+        });
       }
     } catch (error) {
       console.error('Error saving config:', error);
@@ -50,13 +59,13 @@ const ModelSelect = ({
     <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
       <div className="space-y-3 lg:space-y-5">
         <div>
-          <h4 className="text-sm lg:text-base text-black dark:text-white">
+          <h4 className="text-sm lg:text-sm text-black dark:text-white">
             Select {type === 'chat' ? 'Chat Model' : 'Embedding Model'}
           </h4>
           <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
             {type === 'chat'
-              ? 'Select the model to use for chat responses'
-              : 'Select the model to use for embeddings'}
+              ? 'Choose which model to use for generating responses'
+              : 'Choose which model to use for generating embeddings'}
           </p>
         </div>
         <Select
@@ -77,7 +86,7 @@ const ModelSelect = ({
           })),
         )
       }
-      className="!text-xs lg:!text-sm"
+      className="!text-xs lg:!text-[13px]"
      loading={loading}
      disabled={loading}
    />
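For reference, the value passed into `handleSave` above is a single `providerId/modelKey` string, where only the first slash separates the provider id and the model key itself may contain further slashes. A minimal standalone sketch of the same parsing logic (the example value is hypothetical):

```ts
// Mirrors the split logic used in handleSave above; 'provider-123/openai/gpt-4o-mini'
// is an assumed example value, not taken from the diff.
const newValue = 'provider-123/openai/gpt-4o-mini';

const providerId = newValue.split('/')[0]; // 'provider-123'
const modelKey = newValue.split('/').slice(1).join('/'); // 'openai/gpt-4o-mini'

localStorage.setItem('chatModelProviderId', providerId);
localStorage.setItem('chatModelKey', modelKey);
```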
@@ -20,7 +20,7 @@ const Models = ({
   return (
     <div className="flex-1 space-y-6 overflow-y-auto py-6">
       <div className="flex flex-col px-6 gap-y-4">
-        <h3 className="text-xs lg:text-sm text-black/70 dark:text-white/70">
+        <h3 className="text-xs lg:text-xs text-black/70 dark:text-white/70">
           Select models
         </h3>
         <ModelSelect
@@ -38,23 +38,51 @@ const Models = ({
       </div>
       <div className="border-t border-light-200 dark:border-dark-200" />
       <div className="flex flex-row justify-between items-center px-6 ">
-        <p className="text-xs lg:text-sm text-black/70 dark:text-white/70">
-          Manage model provider
+        <p className="text-xs lg:text-xs text-black/70 dark:text-white/70">
+          Manage connections
         </p>
         <AddProvider modelProviders={fields} setProviders={setProviders} />
       </div>
       <div className="flex flex-col px-6 gap-y-4">
-        {providers.map((provider) => (
-          <ModelProvider
-            key={`provider-${provider.id}`}
-            fields={
-              (fields.find((f) => f.key === provider.type)?.fields ??
-                []) as UIConfigField[]
-            }
-            modelProvider={provider}
-            setProviders={setProviders}
-          />
-        ))}
+        {providers.length === 0 ? (
+          <div className="flex flex-col items-center justify-center py-12 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/10 dark:bg-dark-secondary/10">
+            <div className="p-3 rounded-full bg-sky-500/10 dark:bg-sky-500/10 mb-3">
+              <svg
+                xmlns="http://www.w3.org/2000/svg"
+                className="w-8 h-8 text-sky-500"
+                fill="none"
+                viewBox="0 0 24 24"
+                stroke="currentColor"
+              >
+                <path
+                  strokeLinecap="round"
+                  strokeLinejoin="round"
+                  strokeWidth={2}
+                  d="M13 10V3L4 14h7v7l9-11h-7z"
+                />
+              </svg>
+            </div>
+            <p className="text-sm font-medium text-black/70 dark:text-white/70 mb-1">
+              No connections yet
+            </p>
+            <p className="text-xs text-black/50 dark:text-white/50 text-center max-w-sm mb-4">
+              Add your first connection to start using AI models. Connect to
+              OpenAI, Anthropic, Ollama, and more.
+            </p>
+          </div>
+        ) : (
+          providers.map((provider) => (
+            <ModelProvider
+              key={`provider-${provider.id}`}
+              fields={
+                (fields.find((f) => f.key === provider.type)?.fields ??
+                  []) as UIConfigField[]
+              }
+              modelProvider={provider}
+              setProviders={setProviders}
+            />
+          ))
+        )}
       </div>
     </div>
   );
@@ -67,10 +67,10 @@ const UpdateProvider = ({
         });
       });

-      toast.success('Provider updated successfully.');
+      toast.success('Connection updated successfully.');
     } catch (error) {
       console.error('Error updating provider:', error);
-      toast.error('Failed to update provider.');
+      toast.error('Failed to update connection.');
     } finally {
       setLoading(false);
       setOpen(false);
@@ -109,8 +109,8 @@ const UpdateProvider = ({
        <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
          <form onSubmit={handleSubmit} className="flex flex-col flex-1">
            <div className="px-6 pt-6 pb-4">
-             <h3 className="text-black/90 dark:text-white/90 font-medium">
-               Update provider
+             <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
+               Update connection
              </h3>
            </div>
            <div className="border-t border-light-200 dark:border-dark-200" />
@@ -121,13 +121,13 @@ const UpdateProvider = ({
                  className="flex flex-col items-start space-y-2"
                >
                  <label className="text-xs text-black/70 dark:text-white/70">
-                   Name*
+                   Connection Name*
                  </label>
                  <input
                    value={name}
                    onChange={(event) => setName(event.target.value)}
                    className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
-                   placeholder={'Provider Name'}
+                   placeholder={'Connection Name'}
                    type="text"
                    required={true}
                  />
@@ -150,7 +150,7 @@ const UpdateProvider = ({
                        [field.key]: event.target.value,
                      }))
                    }
-                   className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+                   className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
                    placeholder={
                      (field as StringUIConfigField).placeholder
                    }
@@ -166,12 +166,12 @@ const UpdateProvider = ({
                <button
                  type="submit"
                  disabled={loading}
-                 className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
+                 className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
                >
                  {loading ? (
                    <Loader2 className="animate-spin" size={16} />
                  ) : (
-                   'Update Provider'
+                   'Update Connection'
                  )}
                </button>
              </div>
29	src/components/Settings/Sections/Personalization.tsx	Normal file
@@ -0,0 +1,29 @@
+import { UIConfigField } from '@/lib/config/types';
+import SettingsField from '../SettingsField';
+
+const Personalization = ({
+  fields,
+  values,
+}: {
+  fields: UIConfigField[];
+  values: Record<string, any>;
+}) => {
+  return (
+    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
+      {fields.map((field) => (
+        <SettingsField
+          key={field.key}
+          field={field}
+          value={
+            (field.scope === 'client'
+              ? localStorage.getItem(field.key)
+              : values[field.key]) ?? field.default
+          }
+          dataAdd="personalization"
+        />
+      ))}
+    </div>
+  );
+};
+
+export default Personalization;
@@ -1,7 +1,7 @@
 import { UIConfigField } from '@/lib/config/types';
 import SettingsField from '../SettingsField';

-const General = ({
+const Preferences = ({
   fields,
   values,
 }: {
@@ -19,11 +19,11 @@ const General = ({
             ? localStorage.getItem(field.key)
             : values[field.key]) ?? field.default
         }
-        dataAdd="general"
+        dataAdd="preferences"
       />
     ))}
   </div>
   );
 };

-export default General;
+export default Preferences;
@@ -9,7 +9,7 @@ const SettingsButtonMobile = () => {
   return (
     <>
       <button className="lg:hidden" onClick={() => setIsOpen(true)}>
-        <Settings size={18}/>
+        <Settings size={18} />
       </button>
       <AnimatePresence>
         {isOpen && <SettingsDialogue isOpen={isOpen} setIsOpen={setIsOpen} />}
@@ -4,9 +4,10 @@ import {
   BrainCog,
   ChevronLeft,
   Search,
-  Settings,
+  Sliders,
+  ToggleRight,
 } from 'lucide-react';
-import General from './Sections/General';
+import Preferences from './Sections/Preferences';
 import { motion } from 'framer-motion';
 import { useEffect, useState } from 'react';
 import { toast } from 'sonner';
@@ -15,20 +16,29 @@ import { cn } from '@/lib/utils';
 import Models from './Sections/Models/Section';
 import SearchSection from './Sections/Search';
 import Select from '@/components/ui/Select';
+import Personalization from './Sections/Personalization';

 const sections = [
   {
-    key: 'general',
-    name: 'General',
-    description: 'Adjust common settings.',
-    icon: Settings,
-    component: General,
-    dataAdd: 'general',
+    key: 'preferences',
+    name: 'Preferences',
+    description: 'Customize your application preferences.',
+    icon: Sliders,
+    component: Preferences,
+    dataAdd: 'preferences',
+  },
+  {
+    key: 'personalization',
+    name: 'Personalization',
+    description: 'Customize the behavior and tone of the model.',
+    icon: ToggleRight,
+    component: Personalization,
+    dataAdd: 'personalization',
   },
   {
     key: 'models',
     name: 'Models',
-    description: 'Configure model settings.',
+    description: 'Connect to AI services and manage connections.',
     icon: BrainCog,
     component: Models,
     dataAdd: 'modelProviders',
@@ -166,7 +176,7 @@ const SettingsDialogue = ({
       <div className="flex flex-1 flex-col overflow-hidden">
         <div className="border-b border-light-200/60 px-6 pb-6 lg:pt-6 dark:border-dark-200/60 flex-shrink-0">
           <div className="flex flex-col">
-            <h4 className="font-medium text-black dark:text-white text-sm lg:text-base">
+            <h4 className="font-medium text-black dark:text-white text-sm lg:text-sm">
              {selectedSection.name}
            </h4>
            <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
@@ -1,6 +1,7 @@
 import {
   SelectUIConfigField,
   StringUIConfigField,
+  SwitchUIConfigField,
   TextareaUIConfigField,
   UIConfigField,
 } from '@/lib/config/types';
@@ -9,6 +10,7 @@ import Select from '../ui/Select';
 import { toast } from 'sonner';
 import { useTheme } from 'next-themes';
 import { Loader2 } from 'lucide-react';
+import { Switch } from '@headlessui/react';

 const SettingsSelect = ({
   field,
@@ -62,7 +64,7 @@ const SettingsSelect = ({
     <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
       <div className="space-y-3 lg:space-y-5">
         <div>
-          <h4 className="text-sm lg:text-base text-black dark:text-white">
+          <h4 className="text-sm lg:text-sm text-black dark:text-white">
             {field.name}
           </h4>
           <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
@@ -133,7 +135,7 @@ const SettingsInput = ({
     <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
       <div className="space-y-3 lg:space-y-5">
         <div>
-          <h4 className="text-sm lg:text-base text-black dark:text-white">
+          <h4 className="text-sm lg:text-sm text-black dark:text-white">
             {field.name}
           </h4>
           <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
@@ -145,7 +147,7 @@ const SettingsInput = ({
           value={value ?? field.default ?? ''}
           onChange={(event) => setValue(event.target.value)}
           onBlur={(event) => handleSave(event.target.value)}
-          className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+          className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
          placeholder={field.placeholder}
          type="text"
          disabled={loading}
@@ -209,7 +211,7 @@ const SettingsTextarea = ({
     <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
       <div className="space-y-3 lg:space-y-5">
         <div>
-          <h4 className="text-sm lg:text-base text-black dark:text-white">
+          <h4 className="text-sm lg:text-sm text-black dark:text-white">
             {field.name}
           </h4>
           <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
@@ -221,7 +223,7 @@ const SettingsTextarea = ({
           value={value ?? field.default ?? ''}
           onChange={(event) => setValue(event.target.value)}
           onBlur={(event) => handleSave(event.target.value)}
-          className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
+          className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
          placeholder={field.placeholder}
          rows={4}
          disabled={loading}
@@ -237,6 +239,79 @@ const SettingsTextarea = ({
   );
 };
+
+const SettingsSwitch = ({
+  field,
+  value,
+  setValue,
+  dataAdd,
+}: {
+  field: SwitchUIConfigField;
+  value?: any;
+  setValue: (value: any) => void;
+  dataAdd: string;
+}) => {
+  const [loading, setLoading] = useState(false);
+
+  const handleSave = async (newValue: boolean) => {
+    setLoading(true);
+    setValue(newValue);
+    try {
+      if (field.scope === 'client') {
+        localStorage.setItem(field.key, String(newValue));
+      } else {
+        const res = await fetch('/api/config', {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({
+            key: `${dataAdd}.${field.key}`,
+            value: newValue,
+          }),
+        });
+
+        if (!res.ok) {
+          console.error('Failed to save config:', await res.text());
+          throw new Error('Failed to save configuration');
+        }
+      }
+    } catch (error) {
+      console.error('Error saving config:', error);
+      toast.error('Failed to save configuration.');
+    } finally {
+      setTimeout(() => setLoading(false), 150);
+    }
+  };
+
+  const isChecked = value === true || value === 'true';
+
+  return (
+    <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
+      <div className="flex flex-row items-center space-x-3 lg:space-x-5 w-full justify-between">
+        <div>
+          <h4 className="text-sm lg:text-sm text-black dark:text-white">
+            {field.name}
+          </h4>
+          <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
+            {field.description}
+          </p>
+        </div>
+        <Switch
+          checked={isChecked}
+          onChange={handleSave}
+          disabled={loading}
+          className="group relative flex h-6 w-12 shrink-0 cursor-pointer rounded-full bg-white/10 p-1 duration-200 ease-in-out focus:outline-none transition-colors disabled:opacity-60 disabled:cursor-not-allowed data-[checked]:bg-sky-500"
+        >
+          <span
+            aria-hidden="true"
+            className="pointer-events-none inline-block size-4 translate-x-0 rounded-full bg-white shadow-lg ring-0 transition duration-200 ease-in-out group-data-[checked]:translate-x-6"
+          />
+        </Switch>
+      </div>
+    </section>
+  );
+};
+
 const SettingsField = ({
   field,
   value,
@@ -276,6 +351,15 @@ const SettingsField = ({
           dataAdd={dataAdd}
         />
       );
+    case 'switch':
+      return (
+        <SettingsSwitch
+          field={field}
+          value={val}
+          setValue={setVal}
+          dataAdd={dataAdd}
+        />
+      );
     default:
       return <div>Unsupported field type: {field.type}</div>;
   }
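For context, a `switch`-type field rendered by the new `SettingsSwitch` component might be declared roughly like this. The exact `SwitchUIConfigField` shape lives in `@/lib/config/types` and is not shown in this diff, so the example below is a hypothetical sketch limited to the properties the component actually reads (`key`, `name`, `description`, `scope`, `default`):

```ts
// Hypothetical switch field definition (names and values are illustrative only).
const exampleSwitchField = {
  type: 'switch',
  key: 'autoMediaSearch',
  name: 'Automatic Media Search',
  description: 'Automatically search for images and videos.',
  scope: 'client', // 'client' fields persist to localStorage, others POST to /api/config
  default: true,
};
```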
@@ -63,8 +63,11 @@ const SetupConfig = ({
     }
   };

+  const visibleProviders = providers.filter(
+    (p) => p.name.toLowerCase() !== 'transformers',
+  );
   const hasProviders =
-    providers.filter((p) => p.chatModels.length > 0).length > 0;
+    visibleProviders.filter((p) => p.chatModels.length > 0).length > 0;

   return (
     <div className="w-[95vw] md:w-[80vw] lg:w-[65vw] mx-auto px-2 sm:px-4 md:px-6 flex flex-col space-y-6">
@@ -82,10 +85,10 @@ const SetupConfig = ({
       <div className="flex flex-row justify-between items-center mb-4 md:mb-6 pb-3 md:pb-4 border-b border-light-200 dark:border-dark-200">
         <div>
           <p className="text-xs sm:text-sm font-medium text-black dark:text-white">
-            Manage Providers
+            Manage Connections
           </p>
           <p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-0.5">
-            Add and configure your model providers
+            Add connections to access AI models
           </p>
         </div>
         <AddProvider
@@ -101,14 +104,17 @@ const SetupConfig = ({
             Loading providers...
           </p>
         </div>
-      ) : providers.length === 0 ? (
+      ) : visibleProviders.length === 0 ? (
         <div className="flex flex-col items-center justify-center py-8 md:py-12 text-center">
           <p className="text-xs sm:text-sm font-medium text-black/70 dark:text-white/70">
-            No providers configured
+            No connections configured
+          </p>
+          <p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-1">
+            Click "Add Connection" above to get started
           </p>
         </div>
       ) : (
-        providers.map((provider) => (
+        visibleProviders.map((provider) => (
          <ModelProvider
            key={`provider-${provider.id}`}
            fields={
65	src/lib/agents/media/image.ts	Normal file
@@ -0,0 +1,65 @@
+/* I don't think can be classified as agents but to keep the structure consistent i guess ill keep it here */
+
+import {
+  RunnableSequence,
+  RunnableMap,
+  RunnableLambda,
+} from '@langchain/core/runnables';
+import { ChatPromptTemplate } from '@langchain/core/prompts';
+import formatChatHistoryAsString from '@/lib/utils/formatHistory';
+import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
+import { StringOutputParser } from '@langchain/core/output_parsers';
+import { searchSearxng } from '@/lib/searxng';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import LineOutputParser from '@/lib/outputParsers/lineOutputParser';
+import { imageSearchFewShots, imageSearchPrompt } from '@/lib/prompts/media/image';
+
+type ImageSearchChainInput = {
+  chatHistory: BaseMessage[];
+  query: string;
+};
+
+type ImageSearchResult = {
+  img_src: string;
+  url: string;
+  title: string;
+}
+
+const outputParser = new LineOutputParser({
+  key: 'query',
+})
+
+const searchImages = async (
+  input: ImageSearchChainInput,
+  llm: BaseChatModel,
+) => {
+  const chatPrompt = await ChatPromptTemplate.fromMessages([
+    new SystemMessage(imageSearchPrompt),
+    ...imageSearchFewShots,
+    new HumanMessage(`<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`)
+  ]).formatMessages({})
+
+  const res = await llm.invoke(chatPrompt)
+
+  const query = await outputParser.invoke(res)
+
+  const searchRes = await searchSearxng(query!, {
+    engines: ['bing images', 'google images'],
+  });
+
+  const images: ImageSearchResult[] = [];
+
+  searchRes.results.forEach((result) => {
+    if (result.img_src && result.url && result.title) {
+      images.push({
+        img_src: result.img_src,
+        url: result.url,
+        title: result.title,
+      });
+    }
+  });
+
+  return images.slice(0, 10);
+};
+
+export default searchImages;
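A minimal usage sketch for the new `searchImages` helper, not part of the diff itself; the chat model choice is an assumption (any LangChain `BaseChatModel` works, `ChatOpenAI` is used here only as an example):

```ts
import { ChatOpenAI } from '@langchain/openai';
import { HumanMessage } from '@langchain/core/messages';
import searchImages from '@/lib/agents/media/image';

// Assumed model; swap for whatever BaseChatModel the app has configured.
const llm = new ChatOpenAI({ model: 'gpt-4o-mini' });

const images = await searchImages(
  {
    chatHistory: [new HumanMessage('Tell me about the James Webb telescope')],
    query: 'Show me pictures of it',
  },
  llm,
);
// images: up to 10 objects of shape { img_src, url, title }
```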
65	src/lib/agents/media/video.ts	Normal file
@@ -0,0 +1,65 @@
+import { ChatPromptTemplate } from '@langchain/core/prompts';
+import formatChatHistoryAsString from '@/lib/utils/formatHistory';
+import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
+import { searchSearxng } from '@/lib/searxng';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import LineOutputParser from '@/lib/outputParsers/lineOutputParser';
+import { videoSearchFewShots, videoSearchPrompt } from '@/lib/prompts/media/videos';
+
+type VideoSearchChainInput = {
+  chatHistory: BaseMessage[];
+  query: string;
+};
+
+type VideoSearchResult = {
+  img_src: string;
+  url: string;
+  title: string;
+  iframe_src: string;
+}
+
+const outputParser = new LineOutputParser({
+  key: 'query',
+});
+
+const searchVideos = async (
+  input: VideoSearchChainInput,
+  llm: BaseChatModel,
+) => {
+  const chatPrompt = await ChatPromptTemplate.fromMessages([
+    new SystemMessage(videoSearchPrompt),
+    ...videoSearchFewShots,
+    new HumanMessage(`<conversation>${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`)
+  ]).formatMessages({})
+
+  const res = await llm.invoke(chatPrompt)
+
+  const query = await outputParser.invoke(res)
+
+  const searchRes = await searchSearxng(query!, {
+    engines: ['youtube'],
+  });
+
+  const videos: VideoSearchResult[] = [];
+
+  searchRes.results.forEach((result) => {
+    if (
+      result.thumbnail &&
+      result.url &&
+      result.title &&
+      result.iframe_src
+    ) {
+      videos.push({
+        img_src: result.thumbnail,
+        url: result.url,
+        title: result.title,
+        iframe_src: result.iframe_src,
+      });
+    }
+  });
+
+  return videos.slice(0, 10);
+
+};
+
+export default searchVideos;
32	src/lib/agents/suggestions/index.ts	Normal file
@@ -0,0 +1,32 @@
+import ListLineOutputParser from '@/lib/outputParsers/listLineOutputParser';
+import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
+import formatChatHistoryAsString from '@/lib/utils/formatHistory';
+import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { suggestionGeneratorPrompt } from '@/lib/prompts/suggestions';
+
+type SuggestionGeneratorInput = {
+  chatHistory: BaseMessage[];
+};
+
+const outputParser = new ListLineOutputParser({
+  key: 'suggestions',
+});
+
+const generateSuggestions = async (
+  input: SuggestionGeneratorInput,
+  llm: BaseChatModel,
+) => {
+  const chatPrompt = await ChatPromptTemplate.fromMessages([
+    new SystemMessage(suggestionGeneratorPrompt),
+    new HumanMessage(`<conversation>${formatChatHistoryAsString(input.chatHistory)}</conversation>`)
+  ]).formatMessages({})
+
+  const res = await llm.invoke(chatPrompt)
+
+  const suggestions = await outputParser.invoke(res)
+
+  return suggestions
+};
+
+export default generateSuggestions;
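A minimal usage sketch for the rewritten `generateSuggestions` helper, again not part of the diff; the model and the imported `AIMessage` class are assumptions for illustration (any `BaseChatModel` and any chat history of LangChain messages work):

```ts
import { ChatOpenAI } from '@langchain/openai';
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import generateSuggestions from '@/lib/agents/suggestions';

// Assumed model; the app would pass in whichever chat model is configured.
const llm = new ChatOpenAI({ model: 'gpt-4o-mini' });

const suggestions = await generateSuggestions(
  {
    chatHistory: [
      new HumanMessage('What is Perplexica?'),
      new AIMessage('Perplexica is an open-source AI-powered search engine.'),
    ],
  },
  llm,
);
// suggestions: an array of strings parsed from the <suggestions> block in the model output
```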
@@ -1,105 +0,0 @@
-import {
-  RunnableSequence,
-  RunnableMap,
-  RunnableLambda,
-} from '@langchain/core/runnables';
-import { ChatPromptTemplate } from '@langchain/core/prompts';
-import formatChatHistoryAsString from '../utils/formatHistory';
-import { BaseMessage } from '@langchain/core/messages';
-import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../searxng';
-import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import LineOutputParser from '../outputParsers/lineOutputParser';
-
-const imageSearchChainPrompt = `
-You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
-You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
-Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
-`;
-
-type ImageSearchChainInput = {
-  chat_history: BaseMessage[];
-  query: string;
-};
-
-interface ImageSearchResult {
-  img_src: string;
-  url: string;
-  title: string;
-}
-
-const strParser = new StringOutputParser();
-
-const createImageSearchChain = (llm: BaseChatModel) => {
-  return RunnableSequence.from([
-    RunnableMap.from({
-      chat_history: (input: ImageSearchChainInput) => {
-        return formatChatHistoryAsString(input.chat_history);
-      },
-      query: (input: ImageSearchChainInput) => {
-        return input.query;
-      },
-    }),
-    ChatPromptTemplate.fromMessages([
-      ['system', imageSearchChainPrompt],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
-      ],
-      ['assistant', '<query>A cat</query>'],
-
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
-      ],
-      ['assistant', '<query>Car working</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
-      ],
-      ['assistant', '<query>AC working</query>'],
-      [
-        'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
-      ],
-    ]),
-    llm,
-    strParser,
-    RunnableLambda.from(async (input: string) => {
-      const queryParser = new LineOutputParser({
-        key: 'query',
-      });
-
-      return await queryParser.parse(input);
-    }),
-    RunnableLambda.from(async (input: string) => {
-      const res = await searchSearxng(input, {
-        engines: ['bing images', 'google images'],
-      });
-
-      const images: ImageSearchResult[] = [];
-
-      res.results.forEach((result) => {
-        if (result.img_src && result.url && result.title) {
-          images.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-          });
-        }
-      });
-
-      return images.slice(0, 10);
-    }),
-  ]);
-};
-
-const handleImageSearch = (
-  input: ImageSearchChainInput,
-  llm: BaseChatModel,
-) => {
-  const imageSearchChain = createImageSearchChain(llm);
-  return imageSearchChain.invoke(input);
-};
-
-export default handleImageSearch;
@@ -1,55 +0,0 @@
-import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
-import ListLineOutputParser from '../outputParsers/listLineOutputParser';
-import { PromptTemplate } from '@langchain/core/prompts';
-import formatChatHistoryAsString from '../utils/formatHistory';
-import { BaseMessage } from '@langchain/core/messages';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { ChatOpenAI } from '@langchain/openai';
-
-const suggestionGeneratorPrompt = `
-You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
-You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
-Make sure the suggestions are medium in length and are informative and relevant to the conversation.
-
-Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:
-
-<suggestions>
-Tell me more about SpaceX and their recent projects
-What is the latest news on SpaceX?
-Who is the CEO of SpaceX?
-</suggestions>
-
-Conversation:
-{chat_history}
-`;
-
-type SuggestionGeneratorInput = {
-  chat_history: BaseMessage[];
-};
-
-const outputParser = new ListLineOutputParser({
-  key: 'suggestions',
-});
-
-const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
-  return RunnableSequence.from([
-    RunnableMap.from({
-      chat_history: (input: SuggestionGeneratorInput) =>
-        formatChatHistoryAsString(input.chat_history),
-    }),
-    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
-    llm,
-    outputParser,
-  ]);
-};
-
-const generateSuggestions = (
-  input: SuggestionGeneratorInput,
-  llm: BaseChatModel,
-) => {
-  (llm as unknown as ChatOpenAI).temperature = 0;
-  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
-  return suggestionGeneratorChain.invoke(input);
-};
-
-export default generateSuggestions;
@@ -1,110 +0,0 @@
-import {
-  RunnableSequence,
-  RunnableMap,
-  RunnableLambda,
-} from '@langchain/core/runnables';
-import { ChatPromptTemplate } from '@langchain/core/prompts';
-import formatChatHistoryAsString from '../utils/formatHistory';
-import { BaseMessage } from '@langchain/core/messages';
-import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../searxng';
-import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import LineOutputParser from '../outputParsers/lineOutputParser';
-
-const videoSearchChainPrompt = `
-You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
-You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
-Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
-`;
-
-type VideoSearchChainInput = {
-  chat_history: BaseMessage[];
-  query: string;
-};
-
-interface VideoSearchResult {
-  img_src: string;
-  url: string;
-  title: string;
-  iframe_src: string;
-}
-
-const strParser = new StringOutputParser();
-
-const createVideoSearchChain = (llm: BaseChatModel) => {
-  return RunnableSequence.from([
-    RunnableMap.from({
-      chat_history: (input: VideoSearchChainInput) => {
-        return formatChatHistoryAsString(input.chat_history);
-      },
-      query: (input: VideoSearchChainInput) => {
-        return input.query;
-      },
-    }),
-    ChatPromptTemplate.fromMessages([
-      ['system', videoSearchChainPrompt],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
-      ],
-      ['assistant', '<query>How does a car work?</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
-      ],
-      ['assistant', '<query>Theory of relativity</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
-      ],
-      ['assistant', '<query>AC working</query>'],
-      [
-        'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
-      ],
-    ]),
-    llm,
-    strParser,
-    RunnableLambda.from(async (input: string) => {
-      const queryParser = new LineOutputParser({
-        key: 'query',
-      });
-      return await queryParser.parse(input);
-    }),
-    RunnableLambda.from(async (input: string) => {
-      const res = await searchSearxng(input, {
-        engines: ['youtube'],
-      });
-
-      const videos: VideoSearchResult[] = [];
-
-      res.results.forEach((result) => {
-        if (
-          result.thumbnail &&
-          result.url &&
-          result.title &&
-          result.iframe_src
-        ) {
-          videos.push({
-            img_src: result.thumbnail,
-            url: result.url,
-            title: result.title,
-            iframe_src: result.iframe_src,
-          });
-        }
-      });
-
-      return videos.slice(0, 10);
-    }),
-  ]);
-};
-
-const handleVideoSearch = (
-  input: VideoSearchChainInput,
-  llm: BaseChatModel,
-) => {
-  const videoSearchChain = createVideoSearchChain(llm);
-  return videoSearchChain.invoke(input);
-};
-
-export default handleVideoSearch;
@@ -6,11 +6,8 @@ const getClientConfig = (key: string, defaultVal?: any) => {
 
 export const getTheme = () => getClientConfig('theme', 'dark');
 
-export const getAutoImageSearch = () =>
-  Boolean(getClientConfig('autoImageSearch', 'true'));
-
-export const getAutoVideoSearch = () =>
-  Boolean(getClientConfig('autoVideoSearch', 'true'));
+export const getAutoMediaSearch = () =>
+  getClientConfig('autoMediaSearch', 'true') === 'true';
 
 export const getSystemInstructions = () =>
   getClientConfig('systemInstructions', '');
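A note on the clientRegistry hunk above: values persisted through localStorage come back as strings, and Boolean('false') evaluates to true, so the old Boolean(...) wrapper could never report a disabled setting. A minimal standalone sketch of the difference (assuming the setting is stored as the string 'true' or 'false', as the removed code did; this is not repo code):

// Hypothetical check illustrating why the string comparison is needed.
const stored = localStorage.getItem('autoMediaSearch') ?? 'true';
const asBoolean = Boolean(stored);   // true even when stored === 'false'
const asString = stored === 'true';  // false once the user switches it off
console.log({ asBoolean, asString });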
@@ -13,14 +13,15 @@ class ConfigManager {
   currentConfig: Config = {
     version: this.configVersion,
     setupComplete: false,
-    general: {},
+    preferences: {},
+    personalization: {},
     modelProviders: [],
     search: {
      searxngURL: '',
    },
  };
  uiConfigSections: UIConfigSections = {
-    general: [
+    preferences: [
      {
        name: 'Theme',
        key: 'theme',
@@ -40,16 +41,6 @@ class ConfigManager {
        default: 'dark',
        scope: 'client',
      },
-      {
-        name: 'System Instructions',
-        key: 'systemInstructions',
-        type: 'textarea',
-        required: false,
-        description: 'Add custom behavior or tone for the model.',
-        placeholder:
-          'e.g., "Respond in a friendly and concise tone" or "Use British English and format answers as bullet points."',
-        scope: 'client',
-      },
      {
        name: 'Measurement Unit',
        key: 'measureUnit',
@@ -69,6 +60,27 @@ class ConfigManager {
        default: 'Metric',
        scope: 'client',
      },
+      {
+        name: 'Auto video & image search',
+        key: 'autoMediaSearch',
+        type: 'switch',
+        required: false,
+        description: 'Automatically search for relevant images and videos.',
+        default: true,
+        scope: 'client',
+      },
+    ],
+    personalization: [
+      {
+        name: 'System Instructions',
+        key: 'systemInstructions',
+        type: 'textarea',
+        required: false,
+        description: 'Add custom behavior or tone for the model.',
+        placeholder:
+          'e.g., "Respond in a friendly and concise tone" or "Use British English and format answers as bullet points."',
+        scope: 'client',
+      },
    ],
    modelProviders: [],
    search: [
@@ -38,11 +38,17 @@ type TextareaUIConfigField = BaseUIConfigField & {
   default?: string;
 };
 
+type SwitchUIConfigField = BaseUIConfigField & {
+  type: 'switch';
+  default?: boolean;
+};
+
 type UIConfigField =
   | StringUIConfigField
   | SelectUIConfigField
   | PasswordUIConfigField
-  | TextareaUIConfigField;
+  | TextareaUIConfigField
+  | SwitchUIConfigField;
 
 type ConfigModelProvider = {
   id: string;
@@ -57,7 +63,10 @@ type ConfigModelProvider = {
 type Config = {
   version: number;
   setupComplete: boolean;
-  general: {
+  preferences: {
+    [key: string]: any;
+  };
+  personalization: {
     [key: string]: any;
   };
   modelProviders: ConfigModelProvider[];
@@ -80,7 +89,8 @@ type ModelProviderUISection = {
 };
 
 type UIConfigSections = {
-  general: UIConfigField[];
+  preferences: UIConfigField[];
+  personalization: UIConfigField[];
   modelProviders: ModelProviderUISection[];
   search: UIConfigField[];
 };
@@ -95,4 +105,5 @@ export type {
   ModelProviderUISection,
   ConfigModelProvider,
   TextareaUIConfigField,
+  SwitchUIConfigField,
 };
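The new SwitchUIConfigField joins the UIConfigField union with 'type' as the discriminant, so settings code can narrow on field.type and get a boolean default for switches. A self-contained sketch with simplified local types (names mirror the diff, but this is not code from the repo):

type TextareaField = { type: 'textarea'; key: string; default?: string };
type SwitchField = { type: 'switch'; key: string; default?: boolean };
type Field = TextareaField | SwitchField;

// Narrowing on the discriminant yields the right default type per variant.
const initialValue = (f: Field): string | boolean =>
  f.type === 'switch' ? (f.default ?? true) : (f.default ?? '');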
@@ -17,10 +17,11 @@ import {
   useState,
 } from 'react';
 import crypto from 'crypto';
-import { useSearchParams } from 'next/navigation';
+import { useParams, useSearchParams } from 'next/navigation';
 import { toast } from 'sonner';
 import { getSuggestions } from '../actions';
 import { MinimalProvider } from '../models/types';
+import { getAutoMediaSearch } from '../config/clientRegistry';
 
 export type Section = {
   userMessage: UserMessage;
@@ -48,6 +49,8 @@ type ChatContext = {
   messageAppeared: boolean;
   isReady: boolean;
   hasError: boolean;
+  chatModelProvider: ChatModelProvider;
+  embeddingModelProvider: EmbeddingModelProvider;
   setOptimizationMode: (mode: string) => void;
   setFocusMode: (mode: string) => void;
   setFiles: (files: File[]) => void;
@@ -58,6 +61,8 @@ type ChatContext = {
     rewrite?: boolean,
   ) => Promise<void>;
   rewrite: (messageId: string) => void;
+  setChatModelProvider: (provider: ChatModelProvider) => void;
+  setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void;
 };
 
 export interface File {
@@ -90,17 +95,6 @@ const checkConfig = async (
     'embeddingModelProviderId',
   );
 
-  const autoImageSearch = localStorage.getItem('autoImageSearch');
-  const autoVideoSearch = localStorage.getItem('autoVideoSearch');
-
-  if (!autoImageSearch) {
-    localStorage.setItem('autoImageSearch', 'true');
-  }
-
-  if (!autoVideoSearch) {
-    localStorage.setItem('autoVideoSearch', 'false');
-  }
-
   const res = await fetch(`/api/providers`, {
     headers: {
       'Content-Type': 'application/json',
@@ -256,25 +250,24 @@ export const chatContext = createContext<ChatContext>({
   sections: [],
   notFound: false,
   optimizationMode: '',
+  chatModelProvider: { key: '', providerId: '' },
+  embeddingModelProvider: { key: '', providerId: '' },
   rewrite: () => {},
   sendMessage: async () => {},
   setFileIds: () => {},
   setFiles: () => {},
   setFocusMode: () => {},
   setOptimizationMode: () => {},
+  setChatModelProvider: () => {},
+  setEmbeddingModelProvider: () => {},
 });
 
-export const ChatProvider = ({
-  children,
-  id,
-}: {
-  children: React.ReactNode;
-  id?: string;
-}) => {
+export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
+  const params: { chatId: string } = useParams();
   const searchParams = useSearchParams();
   const initialMessage = searchParams.get('q');
 
-  const [chatId, setChatId] = useState<string | undefined>(id);
+  const [chatId, setChatId] = useState<string | undefined>(params.chatId);
   const [newChatCreated, setNewChatCreated] = useState(false);
 
   const [loading, setLoading] = useState(false);
@@ -443,6 +436,19 @@ export const ChatProvider = ({
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);
 
+  useEffect(() => {
+    if (params.chatId && params.chatId !== chatId) {
+      setChatId(params.chatId);
+      setMessages([]);
+      setChatHistory([]);
+      setFiles([]);
+      setFileIds([]);
+      setIsMessagesLoaded(false);
+      setNotFound(false);
+      setNewChatCreated(false);
+    }
+  }, [params.chatId, chatId]);
+
   useEffect(() => {
     if (
       chatId &&
@@ -466,7 +472,7 @@ export const ChatProvider = ({
       setChatId(crypto.randomBytes(20).toString('hex'));
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, []);
+  }, [chatId, isMessagesLoaded, newChatCreated, messages.length]);
 
   useEffect(() => {
     messagesRef.current = messages;
@@ -519,7 +525,7 @@ export const ChatProvider = ({
       messageId,
       rewrite = false,
     ) => {
-      if (loading) return;
+      if (loading || !message) return;
       setLoading(true);
       setMessageAppeared(false);
 
@@ -608,16 +614,13 @@ export const ChatProvider = ({
 
       const lastMsg = messagesRef.current[messagesRef.current.length - 1];
 
-      const autoImageSearch = localStorage.getItem('autoImageSearch');
-      const autoVideoSearch = localStorage.getItem('autoVideoSearch');
+      const autoMediaSearch = getAutoMediaSearch();
 
-      if (autoImageSearch === 'true') {
+      if (autoMediaSearch) {
         document
           .getElementById(`search-images-${lastMsg.messageId}`)
           ?.click();
-      }
 
-      if (autoVideoSearch === 'true') {
         document
           .getElementById(`search-videos-${lastMsg.messageId}`)
           ?.click();
@@ -743,6 +746,10 @@ export const ChatProvider = ({
           setOptimizationMode,
           rewrite,
           sendMessage,
+          setChatModelProvider,
+          chatModelProvider,
+          embeddingModelProvider,
+          setEmbeddingModelProvider,
         }}
       >
         {children}
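With the chat id now read from the route via useParams() and reset in its own effect, ChatProvider no longer takes an id prop. A sketch of how a layout might mount it under the new signature (the import path and layout file are assumptions; the actual consumer is not part of this diff excerpt):

import type { ReactNode } from 'react';
import { ChatProvider } from '@/lib/hooks/useChat'; // assumed path, not shown here

export default function ChatLayout({ children }: { children: ReactNode }) {
  // No `id` prop: the provider derives chatId from params.chatId itself.
  return <ChatProvider>{children}</ChatProvider>;
}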
@@ -1,76 +0,0 @@
-import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
-import { chunkArray } from '@langchain/core/utils/chunk_array';
-
-export interface HuggingFaceTransformersEmbeddingsParams
-  extends EmbeddingsParams {
-  modelName: string;
-
-  model: string;
-
-  timeout?: number;
-
-  batchSize?: number;
-
-  stripNewLines?: boolean;
-}
-
-export class HuggingFaceTransformersEmbeddings
-  extends Embeddings
-  implements HuggingFaceTransformersEmbeddingsParams
-{
-  modelName = 'Xenova/all-MiniLM-L6-v2';
-
-  model = 'Xenova/all-MiniLM-L6-v2';
-
-  batchSize = 512;
-
-  stripNewLines = true;
-
-  timeout?: number;
-
-  constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {
-    super(fields ?? {});
-
-    this.modelName = fields?.model ?? fields?.modelName ?? this.model;
-    this.model = this.modelName;
-    this.stripNewLines = fields?.stripNewLines ?? this.stripNewLines;
-    this.timeout = fields?.timeout;
-  }
-
-  async embedDocuments(texts: string[]): Promise<number[][]> {
-    const batches = chunkArray(
-      this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
-      this.batchSize,
-    );
-
-    const batchRequests = batches.map((batch) => this.runEmbedding(batch));
-    const batchResponses = await Promise.all(batchRequests);
-    const embeddings: number[][] = [];
-
-    for (let i = 0; i < batchResponses.length; i += 1) {
-      const batchResponse = batchResponses[i];
-      for (let j = 0; j < batchResponse.length; j += 1) {
-        embeddings.push(batchResponse[j]);
-      }
-    }
-
-    return embeddings;
-  }
-
-  async embedQuery(text: string): Promise<number[]> {
-    const data = await this.runEmbedding([
-      this.stripNewLines ? text.replace(/\n/g, ' ') : text,
-    ]);
-    return data[0];
-  }
-
-  private async runEmbedding(texts: string[]) {
-    const { pipeline } = await import('@huggingface/transformers');
-    const pipe = await pipeline('feature-extraction', this.model);
-
-    return this.caller.call(async () => {
-      const output = await pipe(texts, { pooling: 'mean', normalize: true });
-      return output.tolist();
-    });
-  }
-}
@@ -48,7 +48,12 @@ class GeminiProvider extends BaseModelProvider<GeminiConfig> {
     let defaultChatModels: Model[] = [];
 
     data.models.forEach((m: any) => {
-      if (m.supportedGenerationMethods.includes('embedText')) {
+      if (
+        m.supportedGenerationMethods.some(
+          (genMethod: string) =>
+            genMethod === 'embedText' || genMethod === 'embedContent',
+        )
+      ) {
         defaultEmbeddingModels.push({
           key: m.name,
           name: m.displayName,
@@ -4,8 +4,7 @@ import BaseModelProvider from './baseProvider';
 import { Embeddings } from '@langchain/core/embeddings';
 import { UIConfigField } from '@/lib/config/types';
 import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
-import { HuggingFaceTransformersEmbeddings } from '@/lib/huggingfaceTransformer';
+import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';
 
 interface TransformersConfig {}
 
 const defaultEmbeddingModels: Model[] = [
src/lib/prompts/media/image.ts (new file)
@@ -0,0 +1,26 @@
+import { BaseMessageLike } from "@langchain/core/messages";
+
+export const imageSearchPrompt = `
+You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
+You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
+Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
+`;
+
+export const imageSearchFewShots: BaseMessageLike[] = [
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
+  ],
+  ['assistant', '<query>A cat</query>'],
+
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
+  ],
+  ['assistant', '<query>Car working</query>'],
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
+  ],
+  ['assistant', '<query>AC working</query>']
+]
src/lib/prompts/media/videos.ts (new file)
@@ -0,0 +1,25 @@
+import { BaseMessageLike } from "@langchain/core/messages";
+
+export const videoSearchPrompt = `
+You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
+You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
+Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
+`;
+
+export const videoSearchFewShots: BaseMessageLike[] = [
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
+  ],
+  ['assistant', '<query>How does a car work?</query>'],
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
+  ],
+  ['assistant', '<query>Theory of relativity</query>'],
+  [
+    'user',
+    '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
+  ],
+  ['assistant', '<query>AC working</query>'],
+]
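These two prompt modules extract the system prompt and few-shot turns that previously lived inline in the deleted videoSearchAgent above. A sketch of how they could be recombined with ChatPromptTemplate in the same pattern the old chain used (the actual consumer is not part of this excerpt, and the import path is an assumption):

import { ChatPromptTemplate } from '@langchain/core/prompts';
import { videoSearchPrompt, videoSearchFewShots } from '@/lib/prompts/media/videos';

// System prompt, then the canned few-shot turns, then the live conversation.
const videoQueryPrompt = ChatPromptTemplate.fromMessages([
  ['system', videoSearchPrompt],
  ...videoSearchFewShots,
  [
    'user',
    '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
  ],
]);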
src/lib/prompts/suggestions/index.ts (new file)
@@ -0,0 +1,15 @@
+export const suggestionGeneratorPrompt = `
+You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
+You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
+Make sure the suggestions are medium in length and are informative and relevant to the conversation.
+
+Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:
+
+<suggestions>
+Tell me more about SpaceX and their recent projects
+What is the latest news on SpaceX?
+Who is the CEO of SpaceX?
+</suggestions>
+
+Today's date is ${new Date().toISOString()}
+`;
yarn.lock
@@ -746,19 +746,19 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"
 
-"@langchain/anthropic@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.0.tgz#48535c5682851bf8fddcf37aa7ca78d4d93da932"
-  integrity sha512-Lud/FrkFmXMYW5R9y0FC+RGdgjBBVQ2JAnG3A8E1I4+sqv5JgJttw3HKRpFkyBUSyacs6LMfSn5dbJ6TT9nMiQ==
+"@langchain/anthropic@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.1.tgz#a9f836b11ecbce282fc2afb8d707c52fd37711c4"
+  integrity sha512-yVKePAT+nNHtybyyPlWqiq6lqcoDlIuMgL9B4WMEU5gbmzL170iodiqcgcZNFQLOC1V2wCOzywq6Zr0kB24AFg==
   dependencies:
     "@anthropic-ai/sdk" "^0.65.0"
 
-"@langchain/classic@1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.0.tgz#acbc15eebba03499cf93e73d2c93703a3da0a46e"
-  integrity sha512-darZFvO5g5e3TqZ4rvZ938F94D4a34v2ZdWfyipmyu7WB4RXMshmYtWCp98o4ec3bfRD9S4+oHMmaPcnk5cs5A==
+"@langchain/classic@1.0.3":
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.3.tgz#92482cb4cb8692407b4ecde0df312f035934472f"
+  integrity sha512-XyoaiJSi4y7SzrZMCb3DdDfC+M3gqIQpVH2cOCh9xQf4244jNrncpLXF/MwOJYWxzTsjfcCAHIbFJ0kSH5nqmg==
   dependencies:
-    "@langchain/openai" "1.0.0-alpha.3"
+    "@langchain/openai" "1.1.1"
     "@langchain/textsplitters" "1.0.0"
     handlebars "^4.7.8"
     js-yaml "^4.1.0"
@@ -771,24 +771,24 @@
   optionalDependencies:
     langsmith "^0.3.64"
 
-"@langchain/community@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.0.tgz#8e587605b7c981882e20281aa9e644a166620145"
-  integrity sha512-CM4vUZHaFHq8HpWBMIWPO5bo/rmRPJ1/iaJk7s8CghkkQ0WLaZzDtoG/wJKJZMDJOUVCtZKTw+TytlGu00/9dg==
+"@langchain/community@^1.0.3":
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.3.tgz#278c82eee22ff37b120e182b07b7c23ffc6786ab"
+  integrity sha512-86L7qooSY8Fh5Sf2Tu/X8PvDJqvEXohyZUGusuv0XtnWGivwtecBm0vEbVPkLF07I2ZMtyAGzHJOblbveq6Nmg==
   dependencies:
-    "@langchain/classic" "1.0.0"
-    "@langchain/openai" "1.0.0"
+    "@langchain/classic" "1.0.3"
+    "@langchain/openai" "1.1.1"
     binary-extensions "^2.2.0"
-    expr-eval "^2.0.2"
     flat "^5.0.2"
     js-yaml "^4.1.0"
+    math-expression-evaluator "^2.0.0"
    uuid "^10.0.0"
    zod "^3.25.76 || ^4"
 
-"@langchain/core@^1.0.1":
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.1.tgz#c2bdbdff87649fe17b2c86bf535d749ac73a586c"
-  integrity sha512-hVM3EkojYOk4ISJQKjLuWYSH6kyyOFlZIrLFETDA1L0Z2/Iu0q32aJawZ0FDn6rlXE8QZjBt/9OaOL36rXc05w==
+"@langchain/core@^1.0.5":
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.5.tgz#1e20ecce80fa4d0b979ea05b24b879b8357d8092"
+  integrity sha512-9Hy/b9+j+mm0Bhnm8xD9B0KpBYTidroLrDHdbrHoMC2DqXoY2umvi1M3M/9D744qsMSaIMP0ZwFcy5YbqI/dGw==
   dependencies:
     "@cfworker/json-schema" "^4.0.2"
     ansi-styles "^5.0.0"
@@ -802,18 +802,18 @@
     uuid "^10.0.0"
     zod "^3.25.76 || ^4"
 
-"@langchain/google-genai@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.0.tgz#2785fa163788cb6214dffc1dc29fcd5bbb751493"
-  integrity sha512-ICUBZl/46nG6+Yhe5v7kp/2TQBGOzqEkpfKPLDeNyJ4x9OOL46xsW3ZZrHJjhGMQuR6/JMmQMTU9kLoYgsd1Tg==
+"@langchain/google-genai@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.1.tgz#3601645f652f24e3beb55acc61878070b49c24ed"
+  integrity sha512-a9Bzaswp1P+eA2V8hAWSBypqjxmH+/zhOY1TBdalQuPQBTRH35jBMVgX3CTTAheAzBUGQtlDD4/dR9tyemDbhw==
   dependencies:
     "@google/generative-ai" "^0.24.0"
     uuid "^11.1.0"
 
-"@langchain/groq@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.0.tgz#413b02158761ff406238467325cd4f9fe0990f3a"
-  integrity sha512-6fG9MEQHNXnxgObFHSPh+BPYyTGcoDnKd+GhI9l96cpHh+QNI+IvypicRCZVSsLdqzRCFHISvBQaH+SP5vgjIw==
+"@langchain/groq@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.1.tgz#7ec8822cd2f29eef4ae0f9c20f67268d1924ab96"
+  integrity sha512-vDQzv6A3mjG0/W/7vL4Iq+dnmhSbMHln+b7Rna810trjZzfNPZhAP6omqZyzCKIqjsQYUH4ODLnSUCNiarfYsQ==
   dependencies:
     groq-sdk "^0.19.0"
 
@@ -842,30 +842,30 @@
     "@langchain/langgraph-sdk" "~1.0.0"
     uuid "^10.0.0"
 
-"@langchain/ollama@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.0.tgz#803c353e9dfb1a9e7b20f1460a6a201fec29bb77"
-  integrity sha512-zqn6i7haMjvZW4FQWo0GrF4wYL5mLurdL0qoe+moYWYSCGaay4K7e/4dqM5C/MR16/HPFDzFbBRMkni2PDRBgA==
+"@langchain/langgraph@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-1.0.1.tgz#d0be714653e8a27665f86ea795c5c34189455406"
+  integrity sha512-7y8OTDLrHrpJ55Y5x7c7zU2BbqNllXwxM106Xrd+NaQB5CpEb4hbUfIwe4XmhhscKPwvhXAq3tjeUxw9MCiurQ==
+  dependencies:
+    "@langchain/langgraph-checkpoint" "^1.0.0"
+    "@langchain/langgraph-sdk" "~1.0.0"
+    uuid "^10.0.0"
+
+"@langchain/ollama@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.1.tgz#c63ac6db65110beef4020a5e2b167ad0bc678d33"
+  integrity sha512-Pe32hhTpMvnRlNFJxkdu6r1QzsONGz5uvoLiMU1TpgAUu7EyKr2osymlgjBLqDe2vMKUmqHb+yWRH0IppDBUOg==
   dependencies:
     ollama "^0.5.12"
     uuid "^10.0.0"
 
-"@langchain/openai@1.0.0", "@langchain/openai@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0.tgz#03b15312286b30ce0149f6052620c6c95b4387bc"
-  integrity sha512-olKEUIjb3HBOiD/NR056iGJz4wiN6HhQ/u65YmGWYadWWoKOcGwheBw/FE0x6SH4zDlI3QmP+vMhuQoaww19BQ==
+"@langchain/openai@1.1.1", "@langchain/openai@^1.1.1":
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.1.1.tgz#67ddcf54ee7ac402f6b75b4b9e25447e78c56a93"
+  integrity sha512-0kUaXejo/sn6QAohWHDaAUapC4CJRkJIajGaWfJC+llSqpDBnmBE1oHg1M2fi1OCeP+ns9SxB6BTsq4Qbiqmig==
   dependencies:
     js-tiktoken "^1.0.12"
-    openai "^6.3.0"
-    zod "^3.25.76 || ^4"
-
-"@langchain/openai@1.0.0-alpha.3":
-  version "1.0.0-alpha.3"
-  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0-alpha.3.tgz#35c4e770e3421b75a226087af54fbeff147e201a"
-  integrity sha512-re2NXLYeLatPzoB6YRoFgB1fW6i5ygcLGa7PlNOhi3f93uU1vSlWMgjkO9dcN9ALmr/bhoruqJEn7U0Eva+6/w==
-  dependencies:
-    js-tiktoken "^1.0.12"
-    openai "^6.3.0"
+    openai "^6.9.0"
     zod "^3.25.76 || ^4"
 
 "@langchain/textsplitters@1.0.0", "@langchain/textsplitters@^1.0.0":
@@ -2607,11 +2607,6 @@ expand-template@^2.0.3:
   resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
   integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
 
-expr-eval@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201"
-  integrity sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg==
-
 fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
@@ -3514,17 +3509,16 @@ kuler@^2.0.0:
   resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
   integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==
 
-langchain@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.1.tgz#fb181176f4aa443ef02e9e5b563bcb4e170dfeb6"
-  integrity sha512-IT4JBVbKBh2AjaUFT9OsmOfeK3UbKy3SgdzZOuvet25sAaMpAR8IaM9XVddRs+OXQqVg6sOS01KUUVCJksVhHg==
+langchain@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.4.tgz#c4fa22d927f41d56c356ecfccea5c08ae7b682ef"
+  integrity sha512-g7z2kKvnXOecybbVGHfI2ZmdmP309mxC1FYlq6WC/7RsKgX5MwY9gBjwK16mpKOaozOD9QCo1Ia7o2UcUBRb9Q==
   dependencies:
     "@langchain/langgraph" "^1.0.0"
     "@langchain/langgraph-checkpoint" "^1.0.0"
+    langsmith "~0.3.74"
     uuid "^10.0.0"
     zod "^3.25.76 || ^4"
-  optionalDependencies:
-    langsmith "^0.3.64"
 
 langsmith@^0.3.64:
   version "0.3.74"
@@ -3539,6 +3533,19 @@ langsmith@^0.3.64:
     semver "^7.6.3"
     uuid "^10.0.0"
 
+langsmith@~0.3.74:
+  version "0.3.79"
+  resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.3.79.tgz#6c845644da26e7fdd8e9b80706091669fc43bda4"
+  integrity sha512-j5uiAsyy90zxlxaMuGjb7EdcL51Yx61SpKfDOI1nMPBbemGju+lf47he4e59Hp5K63CY8XWgFP42WeZ+zuIU4Q==
+  dependencies:
+    "@types/uuid" "^10.0.0"
+    chalk "^4.1.2"
+    console-table-printer "^2.12.1"
+    p-queue "^6.6.2"
+    p-retry "4"
+    semver "^7.6.3"
+    uuid "^10.0.0"
+
 language-subtag-registry@^0.3.20:
   version "0.3.22"
   resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d"
@@ -3686,6 +3693,11 @@ matcher@^3.0.0:
   dependencies:
     escape-string-regexp "^4.0.0"
 
+math-expression-evaluator@^2.0.0:
+  version "2.0.7"
+  resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-2.0.7.tgz#dc99a80ce2bf7f9b7df878126feb5c506c1fdf5f"
+  integrity sha512-uwliJZ6BPHRq4eiqNWxZBDzKUiS5RIynFFcgchqhBOloVLVBpZpNG8jRYkedLcBvhph8TnRyWEuxPqiQcwIdog==
+
 math-intrinsics@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9"
@@ -4025,10 +4037,10 @@ onnxruntime-web@1.22.0-dev.20250409-89f8206ba4:
     platform "^1.3.6"
     protobufjs "^7.2.4"
 
-openai@^6.3.0:
-  version "6.5.0"
-  resolved "https://registry.yarnpkg.com/openai/-/openai-6.5.0.tgz#7dd9c4c0ca6e394c1d1e738b2000e084024685b2"
-  integrity sha512-bNqJ15Ijbs41KuJ2iYz/mGAruFHzQQt7zXo4EvjNLoB64aJdgn1jlMeDTsXjEg+idVYafg57QB/5Rd16oqvZ6A==
+openai@^6.9.0:
+  version "6.9.0"
+  resolved "https://registry.yarnpkg.com/openai/-/openai-6.9.0.tgz#acd15b2233c42b165981f3de8f4cfce27f844fce"
+  integrity sha512-n2sJRYmM+xfJ0l3OfH8eNnIyv3nQY7L08gZQu3dw6wSdfPtKAk92L83M2NIP5SS8Cl/bsBBG3yKzEOjkx0O+7A==
 
 openapi-types@^12.1.3:
   version "12.1.3"
@@ -5491,12 +5503,7 @@ yocto-queue@^0.1.0:
   resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
   integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
 
-zod@^3.22.4:
-  version "3.22.4"
-  resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"
-  integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==
-
-"zod@^3.25.76 || ^4":
+"zod@^3.25.76 || ^4", zod@^4.1.12:
   version "4.1.12"
   resolved "https://registry.yarnpkg.com/zod/-/zod-4.1.12.tgz#64f1ea53d00eab91853195653b5af9eee68970f0"
  integrity sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==