Compare commits

..

5 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| ItzCrazyKns | 600d4ceb29 | feat(hf-transformer): use langchain's inbuilt transformer class | 2025-10-23 23:06:05 +05:30 |
| ItzCrazyKns | 4f50462f1d | feat(package): bump version | 2025-10-23 21:04:33 +05:30 |
| ItzCrazyKns | 231bc22a36 | feat(docker): update searxng build script | 2025-10-23 19:07:22 +05:30 |
| ItzCrazyKns | cb1d85e458 | feat(readme): add volumes | 2025-10-21 16:57:57 +05:30 |
| ItzCrazyKns | ce78b4ff62 | feat(app): show "add model" button | 2025-10-21 16:32:40 +05:30 |
12 changed files with 158 additions and 216 deletions

View File

@@ -17,22 +17,11 @@ RUN yarn build
FROM node:24.5.0-slim
RUN apt-get update && \
    apt-get install -y \
    python3 \
    python3-pip \
    python3-venv \
    python3-dev \
    sqlite3 \
    git \
    build-essential \
    libxslt-dev \
    zlib1g-dev \
    libffi-dev \
    libssl-dev \
    uwsgi \
    uwsgi-plugin-python3 \
    curl \
RUN apt-get update && apt-get install -y \
    python3-dev python3-babel python3-venv python-is-python3 \
    uwsgi uwsgi-plugin-python3 \
    git build-essential libxslt-dev zlib1g-dev libffi-dev libssl-dev \
    curl sudo \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /home/perplexica
@@ -45,28 +34,39 @@ COPY drizzle ./drizzle
RUN mkdir /home/perplexica/uploads
RUN useradd --system --home-dir /usr/local/searxng --shell /bin/sh searxng
WORKDIR /usr/local/searxng
RUN git clone https://github.com/searxng/searxng.git . && \
    python3 -m venv venv && \
    . venv/bin/activate && \
    pip install --upgrade pip setuptools wheel pyyaml && \
    pip install -r requirements.txt && \
    pip install uwsgi
RUN useradd --shell /bin/bash --system \
    --home-dir "/usr/local/searxng" \
    --comment 'Privacy-respecting metasearch engine' \
    searxng
RUN mkdir "/usr/local/searxng"
RUN mkdir -p /etc/searxng
RUN chown -R "searxng:searxng" "/usr/local/searxng"
COPY searxng/settings.yml /etc/searxng/settings.yml
COPY searxng/limiter.toml /etc/searxng/limiter.toml
COPY searxng/uwsgi.ini /etc/searxng/uwsgi.ini
RUN chown -R searxng:searxng /etc/searxng
RUN chown -R searxng:searxng /usr/local/searxng /etc/searxng
USER searxng
RUN git clone "https://github.com/searxng/searxng" \
"/usr/local/searxng/searxng-src"
RUN python3 -m venv "/usr/local/searxng/searx-pyenv"
RUN "/usr/local/searxng/searx-pyenv/bin/pip" install --upgrade pip setuptools wheel pyyaml msgspec
RUN cd "/usr/local/searxng/searxng-src" && \
"/usr/local/searxng/searx-pyenv/bin/pip" install --use-pep517 --no-build-isolation -e .
USER root
WORKDIR /home/perplexica
COPY entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh
RUN sed -i 's/\r$//' ./entrypoint.sh || true
RUN echo "searxng ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
EXPOSE 3000 8080
ENV SEARXNG_API_URL=http://localhost:8080
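
The reworked image still exposes 3000 (Perplexica) and 8080 (SearxNG), with `SEARXNG_API_URL` pointing at the bundled engine. A quick sanity check once a container built from this Dockerfile is running might look like the following sketch (it assumes the container is named `perplexica`, as in the README commands below):

```bash
# SearxNG listens on 8080 inside the container; curl is installed by the apt layer above
docker exec perplexica curl -sf http://localhost:8080 > /dev/null && echo "SearxNG is up"

# Perplexica itself is published to the host on port 3000
curl -sf http://localhost:3000 > /dev/null && echo "Perplexica is up"
```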

View File

@@ -79,19 +79,19 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
Perplexica can be easily run using Docker. Simply run the following command:
```bash
docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
```
This will pull and start the Perplexica container with the bundled SearxNG search engine. Once running, open your browser and navigate to http://localhost:3000. You can then configure your settings (API keys, models, etc.) directly in the setup screen.
**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required.
**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required. The `-v` flags create persistent volumes for your data and uploaded files.
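
Because the `-v` flags use named volumes, your data and uploads survive removing and recreating the container. To confirm the volumes were created, standard Docker commands are enough (a sketch, nothing Perplexica-specific):

```bash
# List the named volumes created by the -v flags
docker volume ls --filter name=perplexica

# Show where Docker keeps the data volume on the host
docker volume inspect perplexica-data --format '{{ .Mountpoint }}'
```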
#### Using Perplexica with Your Own SearxNG Instance
If you already have SearxNG running, you can use the slim version of Perplexica:
```bash
docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
```
**Important**: Make sure your SearxNG instance has:
@@ -118,7 +118,7 @@ If you prefer to build from source or need more control:
```bash
docker build -t perplexica .
docker run -p 3000:3000 --name perplexica perplexica
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica perplexica
```
5. Access Perplexica at http://localhost:3000 and configure your settings in the setup screen.

docker-compose.yaml (new file, 15 lines)
View File

@@ -0,0 +1,15 @@
services:
  perplexica:
    image: itzcrazykns1337/perplexica:latest
    ports:
      - '3000:3000'
    volumes:
      - data:/home/perplexica/data
      - uploads:/home/perplexica/uploads
    restart: unless-stopped

volumes:
  data:
    name: 'perplexica-data'
  uploads:
    name: 'perplexica-uploads'
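
The compose file mirrors the long `docker run` command from the README, with the same two named volumes. Typical usage from the repository root (standard Compose commands; assumes Docker Compose v2):

```bash
# Start Perplexica in the background
docker compose up -d

# Tail logs, then stop; the named volumes perplexica-data and perplexica-uploads are kept
docker compose logs -f perplexica
docker compose down
```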

View File

@@ -17,6 +17,7 @@ Before making search requests, you'll need to get the available providers and th
Returns a list of all active providers with their available chat and embedding models.
**Response Example:**
```json
{
"providers": [

View File

@@ -10,7 +10,7 @@ Simply pull the latest image and restart your container:
docker pull itzcrazykns1337/perplexica:latest
docker stop perplexica
docker rm perplexica
docker run -p 3000:3000 --name perplexica itzcrazykns1337/perplexica:latest
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
```
For slim version:
@@ -19,7 +19,7 @@ For slim version:
docker pull itzcrazykns1337/perplexica:slim-latest
docker stop perplexica
docker rm perplexica
docker run -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 --name perplexica itzcrazykns1337/perplexica:slim-latest
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
```
Once updated, go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.

View File

@@ -1,24 +1,32 @@
#!/bin/sh
set -e
cd /usr/local/searxng
export SEARXNG_SETTINGS_PATH=/etc/searxng/settings.yml
# Start SearXNG in background with all output redirected to /dev/null
/usr/local/searxng/venv/bin/uwsgi \
  --http-socket 0.0.0.0:8080 \
  --ini /etc/searxng/uwsgi.ini \
  --virtualenv /usr/local/searxng/venv \
  --disable-logging > /dev/null 2>&1 &
echo "Starting SearXNG..."
sudo -H -u searxng bash -c "cd /usr/local/searxng/searxng-src && export SEARXNG_SETTINGS_PATH='/etc/searxng/settings.yml' && export FLASK_APP=searx/webapp.py && /usr/local/searxng/searx-pyenv/bin/python -m flask run --host=0.0.0.0 --port=8080" &
SEARXNG_PID=$!
echo "Waiting for SearXNG to be ready..."
sleep 5
COUNTER=0
MAX_TRIES=30
until curl -s http://localhost:8080 > /dev/null 2>&1; do
  COUNTER=$((COUNTER+1))
  if [ $COUNTER -ge $MAX_TRIES ]; then
    echo "Warning: SearXNG health check timeout, but continuing..."
    break
  fi
  sleep 1
done
echo "SearXNG started successfully"
if curl -s http://localhost:8080 > /dev/null 2>&1; then
  echo "SearXNG started successfully (PID: $SEARXNG_PID)"
else
  echo "SearXNG may not be fully ready, but continuing (PID: $SEARXNG_PID)"
fi
cd /home/perplexica
echo "Starting Perplexica..."
exec node server.js

View File

@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
"version": "1.11.0",
"version": "1.11.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {

View File

@@ -112,100 +112,96 @@ const ModelProvider = ({
>
<div className="border-t border-light-200 dark:border-dark-200" />
<div className="flex flex-col gap-y-4 px-5 py-4">
{modelProvider.chatModels.length > 0 && (
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Chat models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="chat"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.chatModels.some((m) => m.key === 'error') ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.chatModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.chatModels.map((model, index) => (
<div
key={`${modelProvider.id}-chat-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('chat', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Chat models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="chat"
/>
</div>
)}
{modelProvider.embeddingModels.length > 0 && (
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Embedding models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="embedding"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.embeddingModels.some(
(m) => m.key === 'error',
) ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.embeddingModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.embeddingModels.map((model, index) => (
<div
key={`${modelProvider.id}-embedding-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
<div className="flex flex-col gap-2">
{modelProvider.chatModels.some((m) => m.key === 'error') ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.chatModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.chatModels.map((model, index) => (
<div
key={`${modelProvider.id}-chat-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('chat', model.key);
}}
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('embedding', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
)}
</div>
<div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center">
<p className="text-[11px] lg:text-xs text-black/70 dark:text-white/70">
Embedding models
</p>
<AddModel
providerId={modelProvider.id}
setProviders={setProviders}
type="embedding"
/>
</div>
<div className="flex flex-col gap-2">
{modelProvider.embeddingModels.some(
(m) => m.key === 'error',
) ? (
<div className="flex flex-row items-center gap-2 text-xs lg:text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" />
<span className="break-words">
{
modelProvider.embeddingModels.find(
(m) => m.key === 'error',
)?.name
}
</span>
</div>
) : (
<div className="flex flex-row flex-wrap gap-2">
{modelProvider.embeddingModels.map((model, index) => (
<div
key={`${modelProvider.id}-embedding-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-xs lg:text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
>
<span>{model.name}</span>
<button
onClick={() => {
handleModelDelete('embedding', model.key);
}}
>
<X size={12} />
</button>
</div>
))}
</div>
)}
</div>
</div>
</div>
</motion.div>
)}

View File

@@ -9,7 +9,7 @@ const SettingsButtonMobile = () => {
return (
<>
<button className="lg:hidden" onClick={() => setIsOpen(true)}>
<Settings size={18}/>
<Settings size={18} />
</button>
<AnimatePresence>
{isOpen && <SettingsDialogue isOpen={isOpen} setIsOpen={setIsOpen} />}

View File

@@ -63,8 +63,7 @@ const SetupConfig = ({
}
};
const hasProviders =
providers.filter((p) => p.chatModels.length > 0).length > 0;
const hasProviders = providers.length > 0;
return (
<div className="w-[95vw] md:w-[80vw] lg:w-[65vw] mx-auto px-2 sm:px-4 md:px-6 flex flex-col space-y-6">

View File

@@ -1,76 +0,0 @@
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
import { chunkArray } from '@langchain/core/utils/chunk_array';

export interface HuggingFaceTransformersEmbeddingsParams
  extends EmbeddingsParams {
  modelName: string;
  model: string;
  timeout?: number;
  batchSize?: number;
  stripNewLines?: boolean;
}

export class HuggingFaceTransformersEmbeddings
  extends Embeddings
  implements HuggingFaceTransformersEmbeddingsParams
{
  modelName = 'Xenova/all-MiniLM-L6-v2';
  model = 'Xenova/all-MiniLM-L6-v2';
  batchSize = 512;
  stripNewLines = true;
  timeout?: number;

  constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {
    super(fields ?? {});
    this.modelName = fields?.model ?? fields?.modelName ?? this.model;
    this.model = this.modelName;
    this.stripNewLines = fields?.stripNewLines ?? this.stripNewLines;
    this.timeout = fields?.timeout;
  }

  async embedDocuments(texts: string[]): Promise<number[][]> {
    const batches = chunkArray(
      this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
      this.batchSize,
    );
    const batchRequests = batches.map((batch) => this.runEmbedding(batch));
    const batchResponses = await Promise.all(batchRequests);
    const embeddings: number[][] = [];
    for (let i = 0; i < batchResponses.length; i += 1) {
      const batchResponse = batchResponses[i];
      for (let j = 0; j < batchResponse.length; j += 1) {
        embeddings.push(batchResponse[j]);
      }
    }
    return embeddings;
  }

  async embedQuery(text: string): Promise<number[]> {
    const data = await this.runEmbedding([
      this.stripNewLines ? text.replace(/\n/g, ' ') : text,
    ]);
    return data[0];
  }

  private async runEmbedding(texts: string[]) {
    const { pipeline } = await import('@huggingface/transformers');
    const pipe = await pipeline('feature-extraction', this.model);
    return this.caller.call(async () => {
      const output = await pipe(texts, { pooling: 'mean', normalize: true });
      return output.tolist();
    });
  }
}
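
With this custom wrapper removed, the transformers provider now relies on LangChain's community implementation of the same class (see the import swap in the next file). A minimal usage sketch, assuming `@langchain/community` and its `@huggingface/transformers` peer dependency are installed:

```ts
import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';

// Same default model the deleted wrapper used
const embeddings = new HuggingFaceTransformersEmbeddings({
  model: 'Xenova/all-MiniLM-L6-v2',
});

async function main() {
  // The Embeddings interface is unchanged, so callers keep using embedQuery/embedDocuments
  const vector = await embeddings.embedQuery('What is Perplexica?');
  console.log(vector.length); // 384 dimensions for all-MiniLM-L6-v2
}

main();
```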

View File

@@ -4,8 +4,7 @@ import BaseModelProvider from './baseProvider';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { HuggingFaceTransformersEmbeddings } from '@/lib/huggingfaceTransformer';
import { HuggingFaceTransformersEmbeddings } from '@langchain/community/embeddings/huggingface_transformers';
interface TransformersConfig {}
const defaultEmbeddingModels: Model[] = [