Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-24 14:08:14 +00:00)

Compare commits: 12 commits, v1.11.0...f6ffa9ebe0
| Author | SHA1 | Date |
| --- | --- | --- |
|  | f6ffa9ebe0 |  |
|  | f9e675823b |  |
|  | 295334b195 |  |
|  | b106abd77f |  |
|  | 2d80fc400d |  |
|  | 097a5c55c6 |  |
|  | d0719429b4 |  |
|  | 600d4ceb29 |  |
|  | 4f50462f1d |  |
|  | 231bc22a36 |  |
|  | cb1d85e458 |  |
|  | ce78b4ff62 |  |
.assets/demo.gif (new binary file, not shown; size 53 MiB)
.assets/sponsers/warp.png (new binary file, not shown; size 433 KiB)
Dockerfile (52 changed lines)
@@ -17,22 +17,11 @@ RUN yarn build

```
FROM node:24.5.0-slim

RUN apt-get update && \
    apt-get install -y \
    python3 \
    python3-pip \
    python3-venv \
    python3-dev \
    sqlite3 \
    git \
    build-essential \
    libxslt-dev \
    zlib1g-dev \
    libffi-dev \
    libssl-dev \
    uwsgi \
    uwsgi-plugin-python3 \
    curl \
RUN apt-get update && apt-get install -y \
    python3-dev python3-babel python3-venv python-is-python3 \
    uwsgi uwsgi-plugin-python3 \
    git build-essential libxslt-dev zlib1g-dev libffi-dev libssl-dev \
    curl sudo \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /home/perplexica
```

@@ -45,28 +34,39 @@ COPY drizzle ./drizzle

```
RUN mkdir /home/perplexica/uploads

RUN useradd --system --home-dir /usr/local/searxng --shell /bin/sh searxng

WORKDIR /usr/local/searxng
RUN git clone https://github.com/searxng/searxng.git . && \
    python3 -m venv venv && \
    . venv/bin/activate && \
    pip install --upgrade pip setuptools wheel pyyaml && \
    pip install -r requirements.txt && \
    pip install uwsgi
RUN useradd --shell /bin/bash --system \
    --home-dir "/usr/local/searxng" \
    --comment 'Privacy-respecting metasearch engine' \
    searxng

RUN mkdir "/usr/local/searxng"
RUN mkdir -p /etc/searxng
RUN chown -R "searxng:searxng" "/usr/local/searxng"

COPY searxng/settings.yml /etc/searxng/settings.yml
COPY searxng/limiter.toml /etc/searxng/limiter.toml
COPY searxng/uwsgi.ini /etc/searxng/uwsgi.ini
RUN chown -R searxng:searxng /etc/searxng

RUN chown -R searxng:searxng /usr/local/searxng /etc/searxng
USER searxng

RUN git clone "https://github.com/searxng/searxng" \
    "/usr/local/searxng/searxng-src"

RUN python3 -m venv "/usr/local/searxng/searx-pyenv"
RUN "/usr/local/searxng/searx-pyenv/bin/pip" install --upgrade pip setuptools wheel pyyaml msgspec
RUN cd "/usr/local/searxng/searxng-src" && \
    "/usr/local/searxng/searx-pyenv/bin/pip" install --use-pep517 --no-build-isolation -e .

USER root

WORKDIR /home/perplexica
COPY entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh
RUN sed -i 's/\r$//' ./entrypoint.sh || true

RUN echo "searxng ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers

EXPOSE 3000 8080

ENV SEARXNG_API_URL=http://localhost:8080
```
README.md (111 changed lines)
@@ -1,74 +1,57 @@

# 🚀 Perplexica - An AI-powered search engine 🔎 <!-- omit in toc -->

<div align="center" markdown="1">
   <sup>Special thanks to:</sup>
   <br>
   <br>
   <a href="https://www.warp.dev/perplexica">
      <img alt="Warp sponsorship" width="400" src="https://github.com/user-attachments/assets/775dd593-9b5f-40f1-bf48-479faff4c27b">
   </a>

### [Warp, the AI Devtool that lives in your terminal](https://www.warp.dev/perplexica)

[Available for MacOS, Linux, & Windows](https://www.warp.dev/perplexica)

</div>

<hr/>

# Perplexica 🔍

[](https://github.com/ItzCrazyKns/Perplexica/stargazers)
[](https://github.com/ItzCrazyKns/Perplexica/network/members)
[](https://github.com/ItzCrazyKns/Perplexica/watchers)
[](https://hub.docker.com/r/itzcrazykns1337/perplexica)
[](https://github.com/ItzCrazyKns/Perplexica/blob/master/LICENSE)
[](https://github.com/ItzCrazyKns/Perplexica/commits/master)
[](https://discord.gg/26aArMy8tT)

Perplexica is a **privacy-focused AI answering engine** that runs entirely on your own hardware. It combines knowledge from the vast internet with support for **local LLMs** (Ollama) and cloud providers (OpenAI, Claude, Groq), delivering accurate answers with **cited sources** while keeping your searches completely private.

## Table of Contents <!-- omit in toc -->

- [Overview](#overview)
- [Preview](#preview)
- [Features](#features)
- [Installation](#installation)
- [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
- [Non-Docker Installation](#non-docker-installation)
- [Ollama Connection Errors](#ollama-connection-errors)
- [Lemonade Connection Errors](#lemonade-connection-errors)
- [Using as a Search Engine](#using-as-a-search-engine)
- [Using Perplexica's API](#using-perplexicas-api)
- [Expose Perplexica to a network](#expose-perplexica-to-network)
- [One-Click Deployment](#one-click-deployment)
- [Upcoming Features](#upcoming-features)
- [Support Us](#support-us)
- [Donations](#donations)
- [Contribution](#contribution)
- [Help and Support](#help-and-support)

## Overview

Perplexica is an open-source, AI-powered search engine that goes deep into the internet to find answers. Inspired by Perplexity AI, it's an open-source option that not only searches the web but also understands your questions. It uses techniques like similarity searching and embeddings to refine results and provides clear answers with cited sources.

Using SearxNG to stay current and fully open source, Perplexica ensures you always get the most up-to-date information without compromising your privacy.



Want to know more about its architecture and how it works? You can read about it [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/README.md).
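As an aside, the embedding-and-similarity reranking the overview refers to can be sketched in a few lines of TypeScript. This is not Perplexica's actual implementation: the `SearchResult` shape and the cosine-similarity scoring below are assumptions used only to illustrate the idea of reranking search results against a query embedding.

```ts
// Hypothetical sketch of embedding-based reranking, not Perplexica's real code.
interface SearchResult {
  title: string;
  url: string;
  embedding: number[]; // embedding of the result's content
}

// Cosine similarity between two vectors of equal length.
const cosineSimilarity = (a: number[], b: number[]): number => {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB) || 1);
};

// Rank results by similarity to the query embedding and keep the top K.
const rerank = (
  queryEmbedding: number[],
  results: SearchResult[],
  topK = 5,
): SearchResult[] =>
  results
    .map((r) => ({ r, score: cosineSimilarity(queryEmbedding, r.embedding) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, topK)
    .map(({ r }) => r);
```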
## Preview

## ✨ Features



🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, and more. Mix and match models based on your needs.

## Features

⚡ **Smart search modes** - Choose Balanced Mode for everyday searches, Fast Mode when you need quick answers, or wait for Quality Mode (coming soon) for deep research.

- **Local LLMs**: You can utilize local LLMs such as Qwen, DeepSeek, Llama, and Mistral.
- **Two Main Modes:**
  - **Copilot Mode:** (In development) Boosts search by generating different queries to find more relevant internet sources. Unlike normal search, instead of just using the context returned by SearxNG, it visits the top matches and tries to find sources relevant to the user's query directly from the page.
  - **Normal Mode:** Processes your query and performs a web search.
- **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
  - **All Mode:** Searches the entire web to find the best results.
  - **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
  - **Academic Search Mode:** Finds articles and papers, ideal for academic research.
  - **YouTube Search Mode:** Finds YouTube videos based on the search query.
  - **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.
  - **Reddit Search Mode:** Searches Reddit for discussions and opinions related to the query.
- **Current Information:** Some search tools might give you outdated information because they rely on data from crawling bots, converted into embeddings and stored in an index. Unlike them, Perplexica uses SearxNG, a metasearch engine, to fetch results and then reranks them to surface the most relevant sources, ensuring you always get the latest information without the overhead of daily data updates.
- **API**: Integrate Perplexica into your existing applications and make use of its capabilities.

🎯 **Six specialized focus modes** - Get better results with modes designed for specific tasks: Academic papers, YouTube videos, Reddit discussions, Wolfram Alpha calculations, writing assistance, or general web search.

It has many more features like image and video search. Some of the planned features are mentioned in [upcoming features](#upcoming-features).

🔍 **Web search powered by SearxNG** - Access multiple search engines while keeping your identity private. Support for Tavily and Exa coming soon for even better results.

📷 **Image and video search** - Find visual content alongside text results. Search isn't limited to just articles anymore.

📄 **File uploads** - Upload documents and ask questions about them. PDFs, text files, images - Perplexica understands them all.

🌐 **Search specific domains** - Limit your search to specific websites when you know where to look. Perfect for technical documentation or research papers.

💡 **Smart suggestions** - Get intelligent search suggestions as you type, helping you formulate better queries.

📚 **Discover** - Browse interesting articles and trending content throughout the day. Stay informed without even searching.

🕒 **Search history** - Every search is saved locally so you can revisit your discoveries anytime. Your research is never lost.

✨ **More coming soon** - We're actively developing new features based on community feedback. Join our Discord to help shape Perplexica's future!
## Sponsors

Perplexica's development is powered by the generous support of our sponsors. Their contributions help keep this project free, open-source, and accessible to everyone.

<div align="center">

<a href="https://www.warp.dev/perplexica">
  <img alt="Warp Terminal" src=".assets/sponsers/warp.png" width="100%">
</a>

**[Warp](https://www.warp.dev/perplexica)** - The AI-powered terminal revolutionizing development workflows

</div>

## Installation
@@ -79,19 +62,19 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.

Perplexica can be easily run using Docker. Simply run the following command:

```bash
docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
```

This will pull and start the Perplexica container with the bundled SearxNG search engine. Once running, open your browser and navigate to http://localhost:3000. You can then configure your settings (API keys, models, etc.) directly in the setup screen.

**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required.

**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required. The `-v` flags create persistent volumes for your data and uploaded files.

#### Using Perplexica with Your Own SearxNG Instance

If you already have SearxNG running, you can use the slim version of Perplexica:

```bash
docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
```

**Important**: Make sure your SearxNG instance has:

@@ -118,7 +101,7 @@ If you prefer to build from source or need more control:

```bash
docker build -t perplexica .
docker run -p 3000:3000 --name perplexica perplexica
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica perplexica
```

5. Access Perplexica at http://localhost:3000 and configure your settings in the setup screen.
docker-compose.yaml (new file, 15 lines)

@@ -0,0 +1,15 @@

```yaml
services:
  perplexica:
    image: itzcrazykns1337/perplexica:latest
    ports:
      - '3000:3000'
    volumes:
      - data:/home/perplexica/data
      - uploads:/home/perplexica/uploads
    restart: unless-stopped

volumes:
  data:
    name: 'perplexica-data'
  uploads:
    name: 'perplexica-uploads'
```
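This compose file mirrors the `docker run` commands shown earlier (same image, port, and named volumes). A minimal usage sketch, assuming the file is saved as `docker-compose.yaml` in the current directory:

```bash
# Start Perplexica in the background using the compose file above
docker compose up -d

# Follow the application logs (service name taken from the compose file)
docker compose logs -f perplexica
```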
@@ -17,6 +17,7 @@ Before making search requests, you'll need to get the available providers and th

Returns a list of all active providers with their available chat and embedding models.

**Response Example:**

```json
{
  "providers": [
```
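The providers listing described above can be consumed with a plain `fetch` call. The following TypeScript sketch is an assumption based on this excerpt: the response example is truncated here and the exact route is not shown, so the `/api/providers` path and the field shapes (taken from the `MinimalProvider` usage elsewhere in this compare) should be checked against the actual API docs.

```ts
// Hypothetical sketch: list active providers and their models.
interface MinimalModel {
  key: string;
  name: string;
}

interface MinimalProvider {
  id: string;
  name: string;
  chatModels: MinimalModel[];
  embeddingModels: MinimalModel[];
}

const listProviders = async (baseUrl = 'http://localhost:3000') => {
  // Route is assumed; adjust to the documented endpoint.
  const res = await fetch(`${baseUrl}/api/providers`);
  if (!res.ok) throw new Error(`Failed to fetch providers: ${res.status}`);
  const data: { providers: MinimalProvider[] } = await res.json();
  return data.providers;
};
```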
@@ -10,7 +10,7 @@ Simply pull the latest image and restart your container:

```bash
docker pull itzcrazykns1337/perplexica:latest
docker stop perplexica
docker rm perplexica
docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
```

For the slim version:

@@ -19,7 +19,7 @@ For slim version:

```bash
docker pull itzcrazykns1337/perplexica:slim-latest
docker stop perplexica
docker rm perplexica
docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
```

Once updated, go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.
@@ -1,24 +1,32 @@

```
#!/bin/sh
set -e

cd /usr/local/searxng
export SEARXNG_SETTINGS_PATH=/etc/searxng/settings.yml

# Start SearXNG in background with all output redirected to /dev/null
/usr/local/searxng/venv/bin/uwsgi \
  --http-socket 0.0.0.0:8080 \
  --ini /etc/searxng/uwsgi.ini \
  --virtualenv /usr/local/searxng/venv \
  --disable-logging > /dev/null 2>&1 &

echo "Starting SearXNG..."

sudo -H -u searxng bash -c "cd /usr/local/searxng/searxng-src && export SEARXNG_SETTINGS_PATH='/etc/searxng/settings.yml' && export FLASK_APP=searx/webapp.py && /usr/local/searxng/searx-pyenv/bin/python -m flask run --host=0.0.0.0 --port=8080" &
SEARXNG_PID=$!

echo "Waiting for SearXNG to be ready..."
sleep 5

COUNTER=0
MAX_TRIES=30
until curl -s http://localhost:8080 > /dev/null 2>&1; do
  COUNTER=$((COUNTER+1))
  if [ $COUNTER -ge $MAX_TRIES ]; then
    echo "Warning: SearXNG health check timeout, but continuing..."
    break
  fi
  sleep 1
done
echo "SearXNG started successfully"

if curl -s http://localhost:8080 > /dev/null 2>&1; then
  echo "SearXNG started successfully (PID: $SEARXNG_PID)"
else
  echo "SearXNG may not be fully ready, but continuing (PID: $SEARXNG_PID)"
fi

cd /home/perplexica
echo "Starting Perplexica..."

exec node server.js
```
@@ -1,6 +1,6 @@

```
{
  "name": "perplexica-frontend",
  "version": "1.11.0",
  "version": "1.11.2",
  "license": "MIT",
  "author": "ItzCrazyKns",
  "scripts": {
```
@@ -1,17 +1,10 @@

```
'use client';

import ChatWindow from '@/components/ChatWindow';
import { useParams } from 'next/navigation';
import React from 'react';
import { ChatProvider } from '@/lib/hooks/useChat';

const Page = () => {
  const { chatId }: { chatId: string } = useParams();
  return (
    <ChatProvider id={chatId}>
      <ChatWindow />
    </ChatProvider>
  );
  return <ChatWindow />;
};

export default Page;
```
@@ -9,6 +9,7 @@ import { Toaster } from 'sonner';

```
import ThemeProvider from '@/components/theme/Provider';
import configManager from '@/lib/config';
import SetupWizard from '@/components/Setup/SetupWizard';
import { ChatProvider } from '@/lib/hooks/useChat';

const montserrat = Montserrat({
  weight: ['300', '400', '500', '700'],
```

@@ -36,7 +37,7 @@ export default function RootLayout({

```
      <body className={cn('h-full', montserrat.className)}>
        <ThemeProvider>
          {setupComplete ? (
            <>
            <ChatProvider>
              <Sidebar>{children}</Sidebar>
              <Toaster
                toastOptions={{
```

@@ -47,7 +48,7 @@ export default function RootLayout({

```
                }}
              />
            </>
            </ChatProvider>
          ) : (
            <SetupWizard configSections={configSections} />
          )}
```
@@ -1,7 +1,5 @@

```
import ChatWindow from '@/components/ChatWindow';
import { ChatProvider } from '@/lib/hooks/useChat';
import { Metadata } from 'next';
import { Suspense } from 'react';

export const metadata: Metadata = {
  title: 'Chat - Perplexica',
```

@@ -9,15 +7,7 @@ export const metadata: Metadata = {

```
};

const Home = () => {
  return (
    <div>
      <Suspense>
        <ChatProvider>
          <ChatWindow />
        </ChatProvider>
      </Suspense>
    </div>
  );
  return <ChatWindow />;
};

export default Home;
```
@@ -9,6 +9,7 @@ import Link from 'next/link';

```
import NextError from 'next/error';
import { useChat } from '@/lib/hooks/useChat';
import Loader from './ui/Loader';
import SettingsButtonMobile from './Settings/SettingsButtonMobile';

export interface BaseMessage {
  chatId: string;
```

@@ -56,9 +57,7 @@ const ChatWindow = () => {

```
    return (
      <div className="relative">
        <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
          <Link href="/settings">
            <Settings className="cursor-pointer lg:hidden" />
          </Link>
          <SettingsButtonMobile />
        </div>
        <div className="flex flex-col items-center justify-center min-h-screen">
          <p className="dark:text-white/70 text-black/70 text-sm">
```
@@ -8,17 +8,16 @@ import {

```
  PopoverPanel,
  Transition,
} from '@headlessui/react';
import { Fragment, useEffect, useState } from 'react';
import { Fragment, useEffect, useMemo, useState } from 'react';
import { MinimalProvider } from '@/lib/models/types';
import { useChat } from '@/lib/hooks/useChat';

const ModelSelector = () => {
  const [providers, setProviders] = useState<MinimalProvider[]>([]);
  const [isLoading, setIsLoading] = useState(true);
  const [searchQuery, setSearchQuery] = useState('');
  const [selectedModel, setSelectedModel] = useState<{
    providerId: string;
    modelKey: string;
  } | null>(null);

  const { setChatModelProvider, chatModelProvider } = useChat();

  useEffect(() => {
    const loadProviders = async () => {
```

@@ -30,28 +29,8 @@ const ModelSelector = () => {

```
        throw new Error('Failed to fetch providers');
      }

      const data = await res.json();
      setProviders(data.providers || []);

      const savedProviderId = localStorage.getItem('chatModelProviderId');
      const savedModelKey = localStorage.getItem('chatModelKey');

      if (savedProviderId && savedModelKey) {
        setSelectedModel({
          providerId: savedProviderId,
          modelKey: savedModelKey,
        });
      } else if (data.providers && data.providers.length > 0) {
        const firstProvider = data.providers.find(
          (p: MinimalProvider) => p.chatModels.length > 0,
        );
        if (firstProvider && firstProvider.chatModels[0]) {
          setSelectedModel({
            providerId: firstProvider.id,
            modelKey: firstProvider.chatModels[0].key,
          });
        }
      }
      const data: { providers: MinimalProvider[] } = await res.json();
      setProviders(data.providers);
    } catch (error) {
      console.error('Error loading providers:', error);
    } finally {
```

@@ -62,13 +41,32 @@ const ModelSelector = () => {

```
    loadProviders();
  }, []);

  const orderedProviders = useMemo(() => {
    if (!chatModelProvider?.providerId) return providers;

    const currentProviderIndex = providers.findIndex(
      (p) => p.id === chatModelProvider.providerId,
    );

    if (currentProviderIndex === -1) {
      return providers;
    }

    const selectedProvider = providers[currentProviderIndex];
    const remainingProviders = providers.filter(
      (_, index) => index !== currentProviderIndex,
    );

    return [selectedProvider, ...remainingProviders];
  }, [providers, chatModelProvider]);

  const handleModelSelect = (providerId: string, modelKey: string) => {
    setSelectedModel({ providerId, modelKey });
    setChatModelProvider({ providerId, key: modelKey });
    localStorage.setItem('chatModelProviderId', providerId);
    localStorage.setItem('chatModelKey', modelKey);
  };

  const filteredProviders = providers
  const filteredProviders = orderedProviders
    .map((provider) => ({
      ...provider,
      chatModels: provider.chatModels.filter(
```

@@ -140,15 +138,16 @@ const ModelSelector = () => {

```
      <div className="flex flex-col px-2 py-2 space-y-0.5">
        {provider.chatModels.map((model) => (
          <PopoverButton
          <button
            key={model.key}
            onClick={() =>
              handleModelSelect(provider.id, model.key)
            }
            type="button"
            className={cn(
              'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
              selectedModel?.providerId === provider.id &&
                selectedModel?.modelKey === model.key
              chatModelProvider?.providerId === provider.id &&
                chatModelProvider?.key === model.key
                ? 'bg-light-secondary dark:bg-dark-secondary'
                : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
            )}
```

@@ -158,8 +157,9 @@ const ModelSelector = () => {

```
            size={15}
            className={cn(
              'shrink-0',
              selectedModel?.providerId === provider.id &&
                selectedModel?.modelKey === model.key
              chatModelProvider?.providerId ===
                provider.id &&
                chatModelProvider?.key === model.key
                ? 'text-sky-500'
                : 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
            )}
```

@@ -167,8 +167,9 @@ const ModelSelector = () => {

```
            <p
              className={cn(
                'text-sm truncate',
                selectedModel?.providerId === provider.id &&
                  selectedModel?.modelKey === model.key
                chatModelProvider?.providerId ===
                  provider.id &&
                  chatModelProvider?.key === model.key
                  ? 'text-sky-500 font-medium'
                  : 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
              )}
```

@@ -176,7 +177,7 @@ const ModelSelector = () => {

```
              {model.name}
            </p>
          </div>
        </PopoverButton>
        </button>
      ))}
    </div>
```
@@ -112,100 +112,96 @@ const ModelProvider = ({

```
>
<div className="border-t border-light-200 dark:border-dark-200" />
<div className="flex flex-col gap-y-4 px-5 py-4">
{modelProvider.chatModels.length > 0 && (
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Chat models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="chat"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.chatModels.some((m) => m.key === 'error') ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.chatModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.chatModels.map((model, index) => (
<div
key={`${modelProvider.id}-chat-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('chat', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Chat models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="chat"
/>
</div>
)}
{modelProvider.embeddingModels.length > 0 && (
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Embedding models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="embedding"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.embeddingModels.some(
(m) => m.key === 'error',
) ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.embeddingModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.embeddingModels.map((model, index) => (
<div
key={`${modelProvider.id}-embedding-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
<div className="flex flex-col gap-2">
{modelProvider.chatModels.some((m) => m.key === 'error') ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.chatModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.chatModels.map((model, index) => (
<div
key={`${modelProvider.id}-chat-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('chat', model.key);
}}
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('embedding', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
)}
</div>
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Embedding models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="embedding"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.embeddingModels.some(
(m) => m.key === 'error',
) ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.embeddingModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.embeddingModels.map((model, index) => (
<div
key={`${modelProvider.id}-embedding-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('embedding', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
</div>
</div>
</motion.div>
)}
```
@@ -1,5 +1,6 @@

```
import Select from '@/components/ui/Select';
import { ConfigModelProvider } from '@/lib/config/types';
import { useChat } from '@/lib/hooks/useChat';
import { useState } from 'react';
import { toast } from 'sonner';
```

@@ -16,6 +17,7 @@ const ModelSelect = ({

```
      : `${localStorage.getItem('embeddingModelProviderId')}/${localStorage.getItem('embeddingModelKey')}`,
  );
  const [loading, setLoading] = useState(false);
  const { setChatModelProvider, setEmbeddingModelProvider } = useChat();

  const handleSave = async (newValue: string) => {
    setLoading(true);
```

@@ -23,20 +25,27 @@ const ModelSelect = ({

```
    try {
      if (type === 'chat') {
        localStorage.setItem('chatModelProviderId', newValue.split('/')[0]);
        localStorage.setItem(
          'chatModelKey',
          newValue.split('/').slice(1).join('/'),
        );
        const providerId = newValue.split('/')[0];
        const modelKey = newValue.split('/').slice(1).join('/');

        localStorage.setItem('chatModelProviderId', providerId);
        localStorage.setItem('chatModelKey', modelKey);

        setChatModelProvider({
          providerId: providerId,
          key: modelKey,
        });
      } else {
        localStorage.setItem(
          'embeddingModelProviderId',
          newValue.split('/')[0],
        );
        localStorage.setItem(
          'embeddingModelKey',
          newValue.split('/').slice(1).join('/'),
        );
        const providerId = newValue.split('/')[0];
        const modelKey = newValue.split('/').slice(1).join('/');

        localStorage.setItem('embeddingModelProviderId', providerId);
        localStorage.setItem('embeddingModelKey', modelKey);

        setEmbeddingModelProvider({
          providerId: providerId,
          key: modelKey,
        });
      }
    } catch (error) {
      console.error('Error saving config:', error);
```
@@ -9,7 +9,7 @@ const SettingsButtonMobile = () => {

```
  return (
    <>
      <button className="lg:hidden" onClick={() => setIsOpen(true)}>
        <Settings size={18}/>
        <Settings size={18} />
      </button>
      <AnimatePresence>
        {isOpen && <SettingsDialogue isOpen={isOpen} setIsOpen={setIsOpen} />}
```
@@ -63,8 +63,7 @@ const SetupConfig = ({

```
    }
  };

  const hasProviders =
    providers.filter((p) => p.chatModels.length > 0).length > 0;
  const hasProviders = providers.length > 0;

  return (
    <div className="w-[95vw] md:w-[80vw] lg:w-[65vw] mx-auto px-2 sm:px-4 md:px-6 flex flex-col space-y-6">
```
@@ -17,7 +17,7 @@ import {

```
  useState,
} from 'react';
import crypto from 'crypto';
import { useSearchParams } from 'next/navigation';
import { useParams, useSearchParams } from 'next/navigation';
import { toast } from 'sonner';
import { getSuggestions } from '../actions';
import { MinimalProvider } from '../models/types';
```

@@ -48,6 +48,8 @@ type ChatContext = {

```
  messageAppeared: boolean;
  isReady: boolean;
  hasError: boolean;
  chatModelProvider: ChatModelProvider;
  embeddingModelProvider: EmbeddingModelProvider;
  setOptimizationMode: (mode: string) => void;
  setFocusMode: (mode: string) => void;
  setFiles: (files: File[]) => void;
```

@@ -58,6 +60,8 @@ type ChatContext = {

```
    rewrite?: boolean,
  ) => Promise<void>;
  rewrite: (messageId: string) => void;
  setChatModelProvider: (provider: ChatModelProvider) => void;
  setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void;
};

export interface File {
```

@@ -256,25 +260,24 @@ export const chatContext = createContext<ChatContext>({

```
  sections: [],
  notFound: false,
  optimizationMode: '',
  chatModelProvider: { key: '', providerId: '' },
  embeddingModelProvider: { key: '', providerId: '' },
  rewrite: () => {},
  sendMessage: async () => {},
  setFileIds: () => {},
  setFiles: () => {},
  setFocusMode: () => {},
  setOptimizationMode: () => {},
  setChatModelProvider: () => {},
  setEmbeddingModelProvider: () => {},
});

export const ChatProvider = ({
  children,
  id,
}: {
  children: React.ReactNode;
  id?: string;
}) => {
export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
  const params: { chatId: string } = useParams();
  const searchParams = useSearchParams();
  const initialMessage = searchParams.get('q');

  const [chatId, setChatId] = useState<string | undefined>(id);
  const [chatId, setChatId] = useState<string | undefined>(params.chatId);
  const [newChatCreated, setNewChatCreated] = useState(false);

  const [loading, setLoading] = useState(false);
```

@@ -443,6 +446,19 @@ export const ChatProvider = ({

```
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  useEffect(() => {
    if (params.chatId && params.chatId !== chatId) {
      setChatId(params.chatId);
      setMessages([]);
      setChatHistory([]);
      setFiles([]);
      setFileIds([]);
      setIsMessagesLoaded(false);
      setNotFound(false);
      setNewChatCreated(false);
    }
  }, [params.chatId, chatId]);

  useEffect(() => {
    if (
      chatId &&
```

@@ -466,7 +482,7 @@ export const ChatProvider = ({

```
      setChatId(crypto.randomBytes(20).toString('hex'));
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
  }, [chatId, isMessagesLoaded, newChatCreated, messages.length]);

  useEffect(() => {
    messagesRef.current = messages;
```

@@ -519,7 +535,7 @@ export const ChatProvider = ({

```
      messageId,
      rewrite = false,
    ) => {
      if (loading) return;
      if (loading || !message) return;
      setLoading(true);
      setMessageAppeared(false);
```

@@ -743,6 +759,10 @@ export const ChatProvider = ({

```
        setOptimizationMode,
        rewrite,
        sendMessage,
        setChatModelProvider,
        chatModelProvider,
        embeddingModelProvider,
        setEmbeddingModelProvider,
      }}
    >
      {children}
```
@@ -1,76 +0,0 @@

```ts
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
import { chunkArray } from '@langchain/core/utils/chunk_array';

export interface HuggingFaceTransformersEmbeddingsParams
  extends EmbeddingsParams {
  modelName: string;

  model: string;

  timeout?: number;

  batchSize?: number;

  stripNewLines?: boolean;
}

export class HuggingFaceTransformersEmbeddings
  extends Embeddings
  implements HuggingFaceTransformersEmbeddingsParams
{
  modelName = 'Xenova/all-MiniLM-L6-v2';

  model = 'Xenova/all-MiniLM-L6-v2';

  batchSize = 512;

  stripNewLines = true;

  timeout?: number;

  constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {
    super(fields ?? {});

    this.modelName = fields?.model ?? fields?.modelName ?? this.model;
    this.model = this.modelName;
    this.stripNewLines = fields?.stripNewLines ?? this.stripNewLines;
    this.timeout = fields?.timeout;
  }

  async embedDocuments(texts: string[]): Promise<number[][]> {
    const batches = chunkArray(
      this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
      this.batchSize,
    );

    const batchRequests = batches.map((batch) => this.runEmbedding(batch));
    const batchResponses = await Promise.all(batchRequests);
    const embeddings: number[][] = [];

    for (let i = 0; i < batchResponses.length; i += 1) {
      const batchResponse = batchResponses[i];
      for (let j = 0; j < batchResponse.length; j += 1) {
        embeddings.push(batchResponse[j]);
      }
    }

    return embeddings;
  }

  async embedQuery(text: string): Promise<number[]> {
    const data = await this.runEmbedding([
      this.stripNewLines ? text.replace(/\n/g, ' ') : text,
    ]);
    return data[0];
  }

  private async runEmbedding(texts: string[]) {
    const { pipeline } = await import('@huggingface/transformers');
    const pipe = await pipeline('feature-extraction', this.model);

    return this.caller.call(async () => {
      const output = await pipe(texts, { pooling: 'mean', normalize: true });
      return output.tolist();
    });
  }
}
```
@@ -4,8 +4,7 @@ import BaseModelProvider from './baseProvider';

```
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { HuggingFaceTransformersEmbeddings } from '@/lib/huggingfaceTransformer';

import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';
interface TransformersConfig {}

const defaultEmbeddingModels: Model[] = [
```
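The import change above swaps the locally maintained wrapper for the `HuggingFaceTransformersEmbeddings` class from `@langchain/community`. A minimal usage sketch, assuming the `@langchain/community` and `@huggingface/transformers` packages are installed; the model name is just an example, and the `model` option follows the removed local wrapper's `model`/`modelName` fields:

```ts
import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';

// Local, in-process embeddings via Transformers.js (example model name).
const embeddings = new HuggingFaceTransformersEmbeddings({
  model: 'Xenova/all-MiniLM-L6-v2',
});

const vector = await embeddings.embedQuery('What is Perplexica?');
console.log(vector.length); // embedding dimensionality
```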