Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-09-17 14:51:32 +00:00)

Compare commits: develop/v1 ... 1bb1e7bb72 (14 commits)

Commits (SHA1):

- 1bb1e7bb72
- 64d2a467b0
- 9a2c4fe3b6
- 060c68a900
- e6b87f89ec
- a661450633
- b3b8a05bd2
- 89b5229ce9
- 7756340dd9
- bbd2e9c359
- a32eb1dda3
- aa834f7f04
- 064c0fbe42
- bf4cf8eaeb
.github/workflows/docker-build.yaml (vendored, 37 changed lines)
@@ -10,9 +10,6 @@ on:
 jobs:
   build-and-push:
     runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        service: [backend, app]
     steps:
       - name: Checkout code
         uses: actions/checkout@v3
@@ -36,38 +33,24 @@ jobs:
         id: version
         run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV

-      - name: Build and push Docker image for ${{ matrix.service }}
+      - name: Build and push Docker image (latest)
         if: github.ref == 'refs/heads/master' && github.event_name == 'push'
         run: |
           docker buildx create --use
-          if [[ "${{ matrix.service }}" == "backend" ]]; then \
-            DOCKERFILE=backend.dockerfile; \
-            IMAGE_NAME=perplexica-backend; \
-          else \
-            DOCKERFILE=app.dockerfile; \
-            IMAGE_NAME=perplexica-frontend; \
-          fi
-          docker buildx build --platform linux/amd64,linux/arm64 \
-            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:main \
+          docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 \
+            --cache-from=type=registry,ref=itzcrazykns1337/perplexica:latest \
             --cache-to=type=inline \
-            -f $DOCKERFILE \
-            -t itzcrazykns1337/${IMAGE_NAME}:main \
+            -f docker/Dockerfile \
+            -t itzcrazykns1337/perplexica:latest \
             --push .

-      - name: Build and push release Docker image for ${{ matrix.service }}
+      - name: Build and push Docker image (release)
         if: github.event_name == 'release'
         run: |
           docker buildx create --use
-          if [[ "${{ matrix.service }}" == "backend" ]]; then \
-            DOCKERFILE=backend.dockerfile; \
-            IMAGE_NAME=perplexica-backend; \
-          else \
-            DOCKERFILE=app.dockerfile; \
-            IMAGE_NAME=perplexica-frontend; \
-          fi
-          docker buildx build --platform linux/amd64,linux/arm64 \
-            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
+          docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 \
+            --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${{ env.RELEASE_VERSION }} \
             --cache-to=type=inline \
-            -f $DOCKERFILE \
-            -t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
+            -f docker/Dockerfile \
+            -t itzcrazykns1337/perplexica:${{ env.RELEASE_VERSION }} \
             --push .
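A quick way to confirm that the consolidated image really was published for every platform listed in the workflow is to inspect its manifest; a minimal sketch, assuming the `itzcrazykns1337/perplexica:latest` tag built above and a Docker CLI with buildx available:

```bash
# Lists the manifest entries; linux/amd64, linux/arm64 and linux/arm/v7
# should each appear once the push has completed.
docker buildx imagetools inspect itzcrazykns1337/perplexica:latest
```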
|
.gitignore (vendored, 4 changed lines)
@@ -2,7 +2,6 @@
 node_modules/
 npm-debug.log
 yarn-error.log
-package-lock.json

 # Build output
 /.next/
@@ -38,6 +37,3 @@ Thumbs.db
 # Db
 db.sqlite
 /searxng
-
-# Dev
-docker-compose-dev.yaml
|
README.md (55 changed lines)
@@ -1,5 +1,21 @@
 # 🚀 Perplexica - An AI-powered search engine 🔎 <!-- omit in toc -->

+<div align="center" markdown="1">
+
+<sup>Special thanks to:</sup>
+<br>
+<br>
+<a href="https://www.warp.dev/perplexica">
+<img alt="Warp sponsorship" width="400" src="https://github.com/user-attachments/assets/775dd593-9b5f-40f1-bf48-479faff4c27b">
+</a>
+
+### [Warp, the AI Devtool that lives in your terminal](https://www.warp.dev/perplexica)
+
+[Available for MacOS, Linux, & Windows](https://www.warp.dev/perplexica)
+
+</div>
+
+<hr/>
+
 [](https://discord.gg/26aArMy8tT)

@@ -10,12 +26,13 @@
 - [Preview](#preview)
 - [Features](#features)
 - [Installation](#installation)
-  - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
+  - [Docker Installation (Recommended)](#docker-installation-recommended)
   - [Non-Docker Installation](#non-docker-installation)
+  - [Nginx Reverse Proxy](#nginx-reverse-proxy)
   - [Ollama Connection Errors](#ollama-connection-errors)
 - [Using as a Search Engine](#using-as-a-search-engine)
 - [Using Perplexica's API](#using-perplexicas-api)
-- [Expose Perplexica to a network](#expose-perplexica-to-network)
+- [Expose Perplexica to a Network](#expose-perplexica-to-a-network)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
 - [Support Us](#support-us)
@@ -55,9 +72,9 @@ It has many more features like image and video search. Some of the planned features are mentioned in [upcoming features](#upcoming-features).

 ## Installation

-There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. Using Docker is highly recommended.
+Perplexica can be installed using Docker (recommended) or directly on your system.

-### Getting Started with Docker (Recommended)
+### Docker Installation (Recommended)

 1. Ensure Docker is installed and running on your system.
 2. Clone the Perplexica repository:
@@ -85,10 +102,15 @@
    docker compose up -d
    ```

-6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:3000 in your web browser.
+6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:8080 in your web browser.

 **Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal.

+The Docker configuration is located in the `docker/` directory, containing:
+
+- Dockerfile with multi-stage build for efficient images
+- Service configurations for the integrated process manager
+- Nginx reverse proxy configuration
+
 ### Non-Docker Installation

 1. Install SearXNG and allow `JSON` format in the SearXNG settings.
@@ -102,6 +124,17 @@

 See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information like exposing it your network, etc.

+### Nginx Reverse Proxy
+
+Perplexica includes an Nginx reverse proxy that provides several key benefits:
+
+- **Single Port Access**: Access both frontend and backend through a single port (8080)
+- **Dynamic Configuration**: Works with any domain or IP without rebuilding
+- **WebSocket Support**: Automatic WebSocket URL configuration based on the current domain
+- **Security Headers**: Enhanced security with proper HTTP headers
+
+When using Docker, the reverse proxy is automatically configured. Access Perplexica at `http://localhost:8080` or `http://your-ip:8080` after starting the containers.
+
 ### Ollama Connection Errors

 If you're encountering an Ollama connection error, it is likely due to the backend being unable to connect to Ollama's API. To fix this issue you can:
@@ -127,7 +160,7 @@ If you wish to use Perplexica as an alternative to traditional search engines like Google or Bing:

 1. Open your browser's settings.
 2. Navigate to the 'Search Engines' section.
-3. Add a new site search with the following URL: `http://localhost:3000/?q=%s`. Replace `localhost` with your IP address or domain name, and `3000` with the port number if Perplexica is not hosted locally.
+3. Add a new site search with the following URL: `http://localhost:8080/?q=%s`. Replace `localhost` with your IP address or domain name if needed.
 4. Click the add button. Now, you can use Perplexica directly from your browser's search bar.

 ## Using Perplexica's API
@@ -136,9 +169,15 @@ Perplexica also provides an API for developers looking to integrate its powerful search capabilities.

 For more details, check out the full documentation [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/API/SEARCH.md).

-## Expose Perplexica to network
+## Expose Perplexica to a Network

-You can access Perplexica over your home network by following our networking guide [here](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md).
+Perplexica can be easily accessed over your home network or exposed to the internet through the Nginx reverse proxy. With this setup:
+
+1. **Local Network Access**: Access Perplexica from any device on your network using `http://server-ip:8080`
+2. **Domain Configuration**: If you have a domain name, point it to your server and access Perplexica with `http://your-domain.com:8080`
+3. **SSL Support**: Configure SSL certificates in Nginx for secure `https://` access
+
+For more network configuration details, see our [networking guide](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md).

 ## One-Click Deployment
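Since the compose file later in this change set defines a healthcheck against the same URL, a quick hedged check that the new single-port setup is answering (assuming the default 8080 mapping) is:

```bash
# Should return an HTTP 200 status line once the container reports healthy.
curl -I http://localhost:8080/
```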
|
@@ -1,15 +0,0 @@
-FROM node:20.18.0-alpine
-
-ARG NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
-ARG NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
-ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
-
-WORKDIR /home/perplexica
-
-COPY ui /home/perplexica/
-
-RUN yarn install --frozen-lockfile
-RUN yarn build
-
-CMD ["yarn", "start"]
|
@@ -1,17 +0,0 @@
-FROM node:18-slim
-
-WORKDIR /home/perplexica
-
-COPY src /home/perplexica/src
-COPY tsconfig.json /home/perplexica/
-COPY drizzle.config.ts /home/perplexica/
-COPY package.json /home/perplexica/
-COPY yarn.lock /home/perplexica/
-
-RUN mkdir /home/perplexica/data
-RUN mkdir /home/perplexica/uploads
-
-RUN yarn install --frozen-lockfile --network-timeout 600000
-RUN yarn build
-
-CMD ["yarn", "start"]
|
@@ -2,49 +2,43 @@ services:
   searxng:
     image: docker.io/searxng/searxng:latest
     volumes:
-      - ./searxng:/etc/searxng:rw
-    ports:
-      - '4000:8080'
+      - ./searxng:/etc/searxng
     networks:
       - perplexica-network
     restart: unless-stopped

-  perplexica-backend:
-    build:
-      context: .
-      dockerfile: backend.dockerfile
-    image: itzcrazykns1337/perplexica-backend:main
-    environment:
-      - SEARXNG_API_URL=http://searxng:8080
-    depends_on:
-      - searxng
+  perplexica:
+    image: itzcrazykns1337/perplexica:latest
     ports:
-      - '3001:3001'
+      - "8080:8080"
+    environment:
+      - SEARXNG_API_URL=http://searxng:4000
+      - SIMILARITY_MEASURE=cosine
+      - KEEP_ALIVE=5m
+      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
+      - GROQ_API_KEY=${GROQ_API_KEY:-}
+      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
+      - GEMINI_API_KEY=${GEMINI_API_KEY:-}
+      - OLLAMA_API_URL=${OLLAMA_API_URL:-}
+      - CUSTOM_OPENAI_API_KEY=${CUSTOM_OPENAI_API_KEY:-}
+      - CUSTOM_OPENAI_API_URL=${CUSTOM_OPENAI_API_URL:-}
+      - CUSTOM_OPENAI_MODEL_NAME=${CUSTOM_OPENAI_MODEL_NAME:-}
     volumes:
-      - backend-dbstore:/home/perplexica/data
-      - uploads:/home/perplexica/uploads
-      - ./config.toml:/home/perplexica/config.toml
+      - backend-dbstore:/app/backend/data
+      - uploads:/app/backend/uploads
     extra_hosts:
       - 'host.docker.internal:host-gateway'
-    networks:
-      - perplexica-network
-    restart: unless-stopped
-
-  perplexica-frontend:
-    build:
-      context: .
-      dockerfile: app.dockerfile
-      args:
-        - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-        - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
-    image: itzcrazykns1337/perplexica-frontend:main
     depends_on:
-      - perplexica-backend
-    ports:
-      - '3000:3000'
+      - searxng
     networks:
       - perplexica-network
     restart: unless-stopped
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8080/"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+      start_period: 5s

 networks:
   perplexica-network:
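The `${VAR:-}` defaults in the environment block mean API keys can be supplied from the shell or from an `.env` file that `docker compose` reads for variable substitution; a minimal sketch (the key value is a placeholder, not a real credential):

```bash
# Option 1: export in the shell before starting the stack.
export OPENAI_API_KEY=sk-your-key-here   # placeholder
docker compose up -d

# Option 2: keep the same assignment in a .env file next to docker-compose.yaml.
printf 'OPENAI_API_KEY=sk-your-key-here\n' > .env
docker compose up -d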
|
docker/Dockerfile (new file, 93 lines)
@@ -0,0 +1,93 @@
+# Multi-stage build for Perplexica
+# Stage 1: Build the backend
+FROM node:lts-alpine as backend-builder
+
+WORKDIR /app
+
+COPY src ./src
+COPY tsconfig.json drizzle.config.ts package.json yarn.lock ./
+
+RUN yarn install --frozen-lockfile --network-timeout 600000 && \
+    yarn build
+
+# Stage 2: Build the frontend
+FROM node:lts-alpine as frontend-builder
+
+WORKDIR /app
+
+COPY ui ./
+ARG NEXT_PUBLIC_API_URL=/api
+ARG NEXT_PUBLIC_WS_URL=auto
+ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
+ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
+
+RUN yarn install --frozen-lockfile && \
+    yarn build
+
+# Stage 3: Final image
+FROM node:lts-alpine
+
+# Install curl and jq for GitHub API access
+RUN apk add --no-cache curl jq
+
+# Determine latest S6 overlay version at build time
+RUN S6_OVERLAY_VERSION=$(curl -s https://api.github.com/repos/just-containers/s6-overlay/releases/latest | jq -r .tag_name | sed 's/^v//') && \
+    echo "Using S6 overlay version: $S6_OVERLAY_VERSION" && \
+    echo "$S6_OVERLAY_VERSION" > /tmp/s6-version
+
+# Use Docker's TARGETARCH for automatic architecture detection
+ARG TARGETARCH
+
+# Install additional required packages and create directory structure in one layer
+RUN apk add --no-cache \
+    nginx \
+    tzdata \
+    bash && \
+    mkdir -p /app/backend /app/frontend /app/data /app/uploads
+
+# Map Docker's architecture names to s6-overlay architecture names and download/install
+RUN S6_OVERLAY_VERSION=$(cat /tmp/s6-version) && \
+    case "${TARGETARCH}" in \
+      "amd64") S6_OVERLAY_ARCH="x86_64" ;; \
+      "arm64") S6_OVERLAY_ARCH="aarch64" ;; \
+      "arm") S6_OVERLAY_ARCH="arm" ;; \
+      *) echo "Unsupported architecture: ${TARGETARCH}. Only amd64, arm64, and arm are supported." && exit 1 ;; \
+    esac && \
+    echo "Target architecture: ${TARGETARCH} -> S6 architecture: ${S6_OVERLAY_ARCH}" && \
+    echo "Downloading s6-overlay v${S6_OVERLAY_VERSION} for architecture: ${S6_OVERLAY_ARCH}" && \
+    curl -L -s -o /tmp/s6-overlay-noarch.tar.xz "https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz" && \
+    tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz && \
+    curl -L -s -o /tmp/s6-overlay-arch.tar.xz "https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_OVERLAY_ARCH}.tar.xz" && \
+    tar -C / -Jxpf /tmp/s6-overlay-arch.tar.xz && \
+    curl -L -s -o /tmp/s6-overlay-symlinks-noarch.tar.xz "https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-symlinks-noarch.tar.xz" && \
+    tar -C / -Jxpf /tmp/s6-overlay-symlinks-noarch.tar.xz && \
+    rm -f /tmp/s6-overlay-*.tar.xz /tmp/s6-version
+
+# Copy configuration files
+COPY docker/etc/s6-overlay/services /etc/services.d/
+COPY docker/etc/nginx/nginx.conf /etc/nginx/nginx.conf
+
+# Make service scripts executable
+RUN chmod +x /etc/services.d/*/run /etc/services.d/*/finish
+
+# Copy application files from builders
+COPY --from=backend-builder /app/dist /app/backend/dist
+COPY --from=backend-builder /app/node_modules /app/backend/node_modules
+COPY --from=backend-builder /app/package.json /app/backend/package.json
+COPY --from=backend-builder /app/drizzle.config.ts /app/backend/drizzle.config.ts
+# Copy only the schema file for Drizzle migrations
+COPY --from=backend-builder /app/src/db/schema.ts /app/backend/src/db/schema.ts
+COPY --from=frontend-builder /app/.next /app/frontend/.next
+COPY --from=frontend-builder /app/node_modules /app/frontend/node_modules
+COPY --from=frontend-builder /app/package.json /app/frontend/package.json
+COPY --from=frontend-builder /app/public /app/frontend/public
+
+# Configure volumes and ports
+VOLUME ["/app/backend/data", "/app/backend/uploads"]
+EXPOSE 8080
+
+# Set up healthcheck
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8080/ || exit 1
+
+ENTRYPOINT ["/init"]
|
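For a quick local test of this multi-stage build without CI, one possible invocation is shown below; the `perplexica:local` tag is an arbitrary example, not a name used by the project:

```bash
# Build for the host architecture only and load the result into the local daemon.
docker buildx build -f docker/Dockerfile -t perplexica:local --load .
```

The CI workflow earlier in this change set differs only in passing a multi-platform `--platform` list and `--push` instead of `--load`.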
docker/etc/nginx/nginx.conf (new file, 55 lines)
@@ -0,0 +1,55 @@
+events {
+    worker_connections 1024;
+}
+
+http {
+    port_in_redirect on;
+    absolute_redirect off;
+
+    server {
+        listen 8080;
+        server_name localhost;
+
+        # Global timeout settings for all locations
+        proxy_read_timeout 86400s;     # 24 hours
+        proxy_send_timeout 86400s;     # 24 hours
+        proxy_connect_timeout 60s;     # Connection establishment timeout
+
+        # API requests
+        location /api {
+            proxy_pass http://localhost:3001;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
+        }
+
+        # WebSocket requests
+        location /ws {
+            proxy_pass http://localhost:3001;
+            proxy_http_version 1.1;
+            proxy_set_header Upgrade $http_upgrade;
+            proxy_set_header Connection "upgrade";
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
+        }
+
+        # Frontend requests
+        location / {
+            proxy_pass http://localhost:3000;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
+        }
+
+        # Security headers
+        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
+        add_header X-XSS-Protection "1; mode=block" always;
+        add_header X-Content-Type-Options "nosniff" always;
+        add_header X-Frame-Options "SAMEORIGIN" always;
+        server_tokens off;
+    }
+}
|
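One way to sanity-check that nginx is the process answering on 8080 and is attaching the headers configured above (a sketch, assuming the default compose setup is running on the same host):

```bash
# The security headers defined in nginx.conf should appear in the response.
curl -sI http://localhost:8080/ | grep -iE 'x-frame-options|x-content-type-options'
```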
docker/etc/s6-overlay/services/backend/finish (new file, 3 lines)

@@ -0,0 +1,3 @@
+#!/usr/bin/with-contenv bash
+s6-svc -d /var/run/s6/services/frontend
+s6-svc -d /var/run/s6/services/nginx

docker/etc/s6-overlay/services/backend/run (new file, 8 lines)

@@ -0,0 +1,8 @@
+#!/usr/bin/with-contenv bash
+cd /app/backend
+
+# Run database migrations before starting the app
+yarn db:push
+
+# Start the application
+exec node dist/app.js

docker/etc/s6-overlay/services/frontend/finish (new file, 2 lines)

@@ -0,0 +1,2 @@
+#!/usr/bin/with-contenv bash
+s6-svc -d /var/run/s6/services/nginx

docker/etc/s6-overlay/services/frontend/run (new file, 3 lines)

@@ -0,0 +1,3 @@
+#!/usr/bin/with-contenv bash
+cd /app/frontend
+exec node_modules/.bin/next start

docker/etc/s6-overlay/services/nginx/run (new file, 2 lines)

@@ -0,0 +1,2 @@
+#!/usr/bin/with-contenv bash
+exec nginx -g "daemon off;"
|
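If one of the three processes misbehaves, its supervision state can be checked from inside the running container. A hedged example follows: the container name `perplexica` matches the troubleshooting docs in this change set, and the service path mirrors the one used by the `finish` scripts above, but both may differ in your setup:

```bash
# Show whether the backend service is currently up and for how long.
docker exec perplexica s6-svstat /var/run/s6/services/backend
```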
@@ -1,109 +1,46 @@
-# Expose Perplexica to a network
+# Accessing Perplexica over a Network

-This guide will show you how to make Perplexica available over a network. Follow these steps to allow computers on the same network to interact with Perplexica. Choose the instructions that match the operating system you are using.
+This guide explains how to access Perplexica over your network using the nginx reverse proxy included in the Docker setup.

-## Windows
-
-1. Open PowerShell as Administrator
-2. Navigate to the directory containing the `docker-compose.yaml` file
-3. Stop and remove the existing Perplexica containers and images:
-
-   ```bash
-   docker compose down --rmi all
-   ```
-
-4. Open the `docker-compose.yaml` file in a text editor like Notepad++
-5. Replace `127.0.0.1` with the IP address of the server Perplexica is running on in these two lines:
-
-   ```
-   args:
-     - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-     - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
-   ```
-
-6. Save and close the `docker-compose.yaml` file
-7. Rebuild and restart the Perplexica container:
-
-   ```bash
-   docker compose up -d --build
-   ```
-
-## macOS
-
-1. Open the Terminal application
-2. Navigate to the directory with the `docker-compose.yaml` file:
-
-   ```bash
-   cd /path/to/docker-compose.yaml
-   ```
-
-3. Stop and remove existing containers and images:
-
-   ```bash
-   docker compose down --rmi all
-   ```
-
-4. Open `docker-compose.yaml` in a text editor like Sublime Text:
-
-   ```bash
-   nano docker-compose.yaml
-   ```
-
-5. Replace `127.0.0.1` with the server IP in these lines:
-
-   ```bash
-   args:
-     - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-     - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
-   ```
-
-6. Save and exit the editor
-7. Rebuild and restart Perplexica:
-
-   ```bash
-   docker compose up -d --build
-   ```
-
-## Linux
-
-1. Open the terminal
-2. Navigate to the `docker-compose.yaml` directory:
-
-   ```bash
-   cd /path/to/docker-compose.yaml
-   ```
-
-3. Stop and remove containers and images:
-
-   ```bash
-   docker compose down --rmi all
-   ```
-
-4. Edit `docker-compose.yaml`:
-
-   ```bash
-   nano docker-compose.yaml
-   ```
-
-5. Replace `127.0.0.1` with the server IP:
-
-   ```bash
-   args:
-     - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-     - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
-   ```
-
-6. Save and exit the editor
-7. Rebuild and restart Perplexica:
-
-   ```bash
-   docker compose up -d --build
-   ```
+## Basic Network Access
+
+Perplexica is automatically accessible from any device on your network:
+
+1. Start Perplexica using Docker Compose:
+
+   ```bash
+   docker compose up -d
+   ```
+
+2. Find your server's IP address:
+
+   - **Windows**: `ipconfig` in Command Prompt
+   - **macOS**: `ifconfig | grep "inet "` in Terminal
+   - **Linux**: `ip addr show | grep "inet "` in Terminal
+
+3. Access Perplexica from any device on your network:
+
+   ```bash
+   http://YOUR_SERVER_IP:8080
+   ```
+
+## Custom Port Configuration
+
+If you need to use a different port instead of the default 8080:
+
+1. Modify the `docker-compose.yaml` file:
+
+   ```yaml
+   perplexica:
+     ports:
+       - "YOUR_CUSTOM_PORT:8080"
+   ```
+
+2. Restart the containers:
+
+   ```bash
+   docker compose down && docker compose up -d
+   ```
+
+## Troubleshooting
+
+If you encounter issues accessing Perplexica over your network:
+
+1. **Firewall Settings**: Ensure port 8080 (or your custom port) is allowed in your firewall
+2. **Docker Logs**: Check for any connection issues with `docker logs perplexica`
+3. **Network Access**: Make sure your devices are on the same network and can reach the server
|
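The firewall step in the new troubleshooting list is distribution-specific; as one concrete illustration (ufw on Ubuntu, adjust for whichever firewall your host uses):

```bash
# Allow the reverse-proxy port through the host firewall.
sudo ufw allow 8080/tcp
```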
@@ -30,8 +30,8 @@
     "@iarna/toml": "^2.2.5",
     "@langchain/anthropic": "^0.2.3",
     "@langchain/community": "^0.2.16",
-    "@langchain/google-genai": "^0.0.23",
     "@langchain/openai": "^0.0.25",
+    "@langchain/google-genai": "^0.0.23",
     "@xenova/transformers": "^2.17.1",
     "axios": "^1.6.8",
     "better-sqlite3": "^11.0.0",
|
@@ -3,12 +3,6 @@ PORT = 3001 # Port to run the server on
 SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

-[SEARCH_ENGINE_BACKENDS] # "google" | "searxng" | "bing" | "brave" | "yacy"
-SEARCH = "searxng"
-IMAGE = "searxng"
-VIDEO = "searxng"
-NEWS = "searxng"
-
 [MODELS.OPENAI]
 API_KEY = ""

@@ -24,22 +18,10 @@ API_KEY = ""
 [MODELS.CUSTOM_OPENAI]
 API_KEY = ""
 API_URL = ""
+MODEL_NAME = ""

 [MODELS.OLLAMA]
 API_URL = "" # Ollama API URL - http://host.docker.internal:11434

-[SEARCH_ENGINES.GOOGLE]
-API_KEY = ""
-CSE_ID = ""
-
-[SEARCH_ENGINES.SEARXNG]
-ENDPOINT = ""
-
-[SEARCH_ENGINES.BING]
-SUBSCRIPTION_KEY = ""
-
-[SEARCH_ENGINES.BRAVE]
-API_KEY = ""
-
-[SEARCH_ENGINES.YACY]
-ENDPOINT = ""
+[API_ENDPOINTS]
+SEARXNG = "http://localhost:32768" # SearxNG API URL
|
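With the config.ts changes later in this set, the values in this file can also be supplied as environment variables, which take precedence over the TOML entries; a sketch of a non-Docker run relying on that (both values are placeholders):

```bash
# Environment variables override the corresponding config.toml entries.
SEARXNG_API_URL=http://localhost:32768 OPENAI_API_KEY=sk-your-key-here yarn start
```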
@@ -15,5 +15,3 @@ server:
 engines:
   - name: wolframalpha
     disabled: false
-  - name: qwant
-    disabled: true
|
@@ -7,12 +7,7 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../lib/searchEngines/searxng';
-import { searchGooglePSE } from '../lib/searchEngines/google_pse';
-import { searchBraveAPI } from '../lib/searchEngines/brave';
-import { searchYaCy } from '../lib/searchEngines/yacy';
-import { searchBingAPI } from '../lib/searchEngines/bing';
-import { getImageSearchEngineBackend } from '../config';
+import { searchSearxng } from '../lib/searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

 const imageSearchChainPrompt = `
@@ -41,103 +36,6 @@ type ImageSearchChainInput = {
   query: string;
 };

-async function performImageSearch(query: string) {
-  const searchEngine = getImageSearchEngineBackend();
-  let images = [];
-
-  switch (searchEngine) {
-    case 'google': {
-      const googleResult = await searchGooglePSE(query);
-      images = googleResult.results
-        .map((result) => {
-          if (result.img_src && result.url && result.title) {
-            return {
-              img_src: result.img_src,
-              url: result.url,
-              title: result.title,
-              source: result.displayLink,
-            };
-          }
-        })
-        .filter(Boolean);
-      break;
-    }
-
-    case 'searxng': {
-      const searxResult = await searchSearxng(query, {
-        engines: ['google images', 'bing images'],
-        pageno: 1,
-      });
-      searxResult.results.forEach((result) => {
-        if (result.img_src && result.url && result.title) {
-          images.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-          });
-        }
-      });
-      break;
-    }
-
-    case 'brave': {
-      const braveResult = await searchBraveAPI(query);
-      images = braveResult.results
-        .map((result) => {
-          if (result.img_src && result.url && result.title) {
-            return {
-              img_src: result.img_src,
-              url: result.url,
-              title: result.title,
-              source: result.url,
-            };
-          }
-        })
-        .filter(Boolean);
-      break;
-    }
-
-    case 'yacy': {
-      const yacyResult = await searchYaCy(query);
-      images = yacyResult.results
-        .map((result) => {
-          if (result.img_src && result.url && result.title) {
-            return {
-              img_src: result.img_src,
-              url: result.url,
-              title: result.title,
-              source: result.url,
-            };
-          }
-        })
-        .filter(Boolean);
-      break;
-    }
-
-    case 'bing': {
-      const bingResult = await searchBingAPI(query);
-      images = bingResult.results
-        .map((result) => {
-          if (result.img_src && result.url && result.title) {
-            return {
-              img_src: result.img_src,
-              url: result.url,
-              title: result.title,
-              source: result.url,
-            };
-          }
-        })
-        .filter(Boolean);
-      break;
-    }
-
-    default:
-      throw new Error(`Unknown search engine ${searchEngine}`);
-  }
-
-  return images;
-}
-
 const strParser = new StringOutputParser();

 const createImageSearchChain = (llm: BaseChatModel) => {
@@ -154,7 +52,22 @@ const createImageSearchChain = (llm: BaseChatModel) => {
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
-      const images = await performImageSearch(input);
+      const res = await searchSearxng(input, {
+        engines: ['bing images', 'google images'],
+      });
+
+      const images = [];
+
+      res.results.forEach((result) => {
+        if (result.img_src && result.url && result.title) {
+          images.push({
+            img_src: result.img_src,
+            url: result.url,
+            title: result.title,
+          });
+        }
+      });
+
       return images.slice(0, 10);
     }),
   ]);
|
@@ -7,11 +7,7 @@ import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
-import { searchSearxng } from '../lib/searchEngines/searxng';
-import { searchGooglePSE } from '../lib/searchEngines/google_pse';
-import { searchBraveAPI } from '../lib/searchEngines/brave';
-import { searchBingAPI } from '../lib/searchEngines/bing';
-import { getVideoSearchEngineBackend } from '../config';
+import { searchSearxng } from '../lib/searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

 const VideoSearchChainPrompt = `
@@ -42,102 +38,6 @@ type VideoSearchChainInput = {

 const strParser = new StringOutputParser();

-async function performVideoSearch(query: string) {
-  const searchEngine = getVideoSearchEngineBackend();
-  const youtubeQuery = `${query} site:youtube.com`;
-  let videos = [];
-
-  switch (searchEngine) {
-    case 'google': {
-      const googleResult = await searchGooglePSE(youtubeQuery);
-      googleResult.results.forEach((result) => {
-        // Use .results instead of .originalres
-        if (result.img_src && result.url && result.title) {
-          const videoId = new URL(result.url).searchParams.get('v');
-          videos.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-            iframe_src: videoId
-              ? `https://www.youtube.com/embed/${videoId}`
-              : null,
-          });
-        }
-      });
-      break;
-    }
-
-    case 'searxng': {
-      const searxResult = await searchSearxng(query, {
-        engines: ['youtube'],
-      });
-      searxResult.results.forEach((result) => {
-        if (
-          result.thumbnail &&
-          result.url &&
-          result.title &&
-          result.iframe_src
-        ) {
-          videos.push({
-            img_src: result.thumbnail,
-            url: result.url,
-            title: result.title,
-            iframe_src: result.iframe_src,
-          });
-        }
-      });
-      break;
-    }
-
-    case 'brave': {
-      const braveResult = await searchBraveAPI(youtubeQuery);
-      braveResult.results.forEach((result) => {
-        if (result.img_src && result.url && result.title) {
-          const videoId = new URL(result.url).searchParams.get('v');
-          videos.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-            iframe_src: videoId
-              ? `https://www.youtube.com/embed/${videoId}`
-              : null,
-          });
-        }
-      });
-      break;
-    }
-
-    case 'yacy': {
-      console.log('Not available for yacy');
-      videos = [];
-      break;
-    }
-
-    case 'bing': {
-      const bingResult = await searchBingAPI(youtubeQuery);
-      bingResult.results.forEach((result) => {
-        if (result.img_src && result.url && result.title) {
-          const videoId = new URL(result.url).searchParams.get('v');
-          videos.push({
-            img_src: result.img_src,
-            url: result.url,
-            title: result.title,
-            iframe_src: videoId
-              ? `https://www.youtube.com/embed/${videoId}`
-              : null,
-          });
-        }
-      });
-      break;
-    }
-
-    default:
-      throw new Error(`Unknown search engine ${searchEngine}`);
-  }
-
-  return videos;
-}
-
 const createVideoSearchChain = (llm: BaseChatModel) => {
   return RunnableSequence.from([
     RunnableMap.from({
@@ -152,7 +52,28 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
-      const videos = await performVideoSearch(input);
+      const res = await searchSearxng(input, {
+        engines: ['youtube'],
+      });
+
+      const videos = [];
+
+      res.results.forEach((result) => {
+        if (
+          result.thumbnail &&
+          result.url &&
+          result.title &&
+          result.iframe_src
+        ) {
+          videos.push({
+            img_src: result.thumbnail,
+            url: result.url,
+            title: result.title,
+            iframe_src: result.iframe_src,
+          });
+        }
+      });
+
       return videos.slice(0, 10);
     }),
   ]);
|
src/config.ts (121 changed lines)
@@ -10,12 +10,6 @@ interface Config {
     SIMILARITY_MEASURE: string;
     KEEP_ALIVE: string;
   };
-  SEARCH_ENGINE_BACKENDS: {
-    SEARCH: string;
-    IMAGE: string;
-    VIDEO: string;
-    NEWS: string;
-  };
   MODELS: {
     OPENAI: {
       API_KEY: string;
@@ -38,23 +32,8 @@ interface Config {
       MODEL_NAME: string;
     };
   };
-  SEARCH_ENGINES: {
-    GOOGLE: {
-      API_KEY: string;
-      CSE_ID: string;
-    };
-    SEARXNG: {
-      ENDPOINT: string;
-    };
-    BING: {
-      SUBSCRIPTION_KEY: string;
-    };
-    BRAVE: {
-      API_KEY: string;
-    };
-    YACY: {
-      ENDPOINT: string;
-    };
+  API_ENDPOINTS: {
+    SEARXNG: string;
   };
 }

@@ -62,63 +41,83 @@ type RecursivePartial<T> = {
   [P in keyof T]?: RecursivePartial<T[P]>;
 };

-const loadConfig = () =>
-  toml.parse(
-    fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
-  ) as any as Config;
+const loadConfig = () => {
+  try {
+    return toml.parse(
+      fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
+    ) as any as Config;
+  } catch (error) {
+    // Return default config if file doesn't exist
+    return {
+      GENERAL: {
+        PORT: 3001,
+        SIMILARITY_MEASURE: 'cosine',
+        KEEP_ALIVE: '5m',
+      },
+      MODELS: {
+        OPENAI: {
+          API_KEY: '',
+        },
+        GROQ: {
+          API_KEY: '',
+        },
+        ANTHROPIC: {
+          API_KEY: '',
+        },
+        GEMINI: {
+          API_KEY: '',
+        },
+        OLLAMA: {
+          API_URL: '',
+        },
+        CUSTOM_OPENAI: {
+          API_URL: '',
+          API_KEY: '',
+          MODEL_NAME: '',
+        },
+      },
+      API_ENDPOINTS: {
+        SEARXNG: '',
+      },
+    };
+  }
+};

-export const getPort = () => loadConfig().GENERAL.PORT;
+export const getPort = () =>
+  process.env.PORT ? parseInt(process.env.PORT, 10) : loadConfig().GENERAL.PORT;

 export const getSimilarityMeasure = () =>
-  loadConfig().GENERAL.SIMILARITY_MEASURE;
+  process.env.SIMILARITY_MEASURE || loadConfig().GENERAL.SIMILARITY_MEASURE;

-export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
+export const getKeepAlive = () =>
+  process.env.KEEP_ALIVE || loadConfig().GENERAL.KEEP_ALIVE;

-export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
+export const getOpenaiApiKey = () =>
+  process.env.OPENAI_API_KEY || loadConfig().MODELS.OPENAI.API_KEY;

-export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
+export const getGroqApiKey = () =>
+  process.env.GROQ_API_KEY || loadConfig().MODELS.GROQ.API_KEY;

-export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
+export const getAnthropicApiKey = () =>
+  process.env.ANTHROPIC_API_KEY || loadConfig().MODELS.ANTHROPIC.API_KEY;

-export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
+export const getGeminiApiKey = () =>
+  process.env.GEMINI_API_KEY || loadConfig().MODELS.GEMINI.API_KEY;

-export const getSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.SEARCH;
-
-export const getImageSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.IMAGE || getSearchEngineBackend();
-
-export const getVideoSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.VIDEO || getSearchEngineBackend();
-
-export const getNewsSearchEngineBackend = () =>
-  loadConfig().SEARCH_ENGINE_BACKENDS.NEWS || getSearchEngineBackend();
-
-export const getGoogleApiKey = () => loadConfig().SEARCH_ENGINES.GOOGLE.API_KEY;
-
-export const getGoogleCseId = () => loadConfig().SEARCH_ENGINES.GOOGLE.CSE_ID;
-
-export const getBraveApiKey = () => loadConfig().SEARCH_ENGINES.BRAVE.API_KEY;
-
-export const getBingSubscriptionKey = () =>
-  loadConfig().SEARCH_ENGINES.BING.SUBSCRIPTION_KEY;
-
-export const getYacyJsonEndpoint = () =>
-  loadConfig().SEARCH_ENGINES.YACY.ENDPOINT;
-
 export const getSearxngApiEndpoint = () =>
-  process.env.SEARXNG_API_URL || loadConfig().SEARCH_ENGINES.SEARXNG.ENDPOINT;
+  process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;

-export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
+export const getOllamaApiEndpoint = () =>
+  process.env.OLLAMA_API_URL || loadConfig().MODELS.OLLAMA.API_URL;

 export const getCustomOpenaiApiKey = () =>
-  loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
+  process.env.CUSTOM_OPENAI_API_KEY || loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

 export const getCustomOpenaiApiUrl = () =>
-  loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
+  process.env.CUSTOM_OPENAI_API_URL || loadConfig().MODELS.CUSTOM_OPENAI.API_URL;

 export const getCustomOpenaiModelName = () =>
-  loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
+  process.env.CUSTOM_OPENAI_MODEL_NAME || loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;

 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
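The getters above follow a straightforward env-first fallback, so a variable set in the environment wins over `config.toml`. For instance, with the new `getPort`, a quick sketch of overriding the port for a non-Docker run (assuming the compiled backend entry point used by the service scripts earlier in this set):

```bash
# PORT from the environment takes precedence over GENERAL.PORT in config.toml.
PORT=3002 node dist/app.js
```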
|
@@ -1,105 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getBingSubscriptionKey } from '../../config';
|
|
||||||
|
|
||||||
interface BingAPISearchResult {
|
|
||||||
_type: string;
|
|
||||||
name: string;
|
|
||||||
url: string;
|
|
||||||
displayUrl: string;
|
|
||||||
snippet?: string;
|
|
||||||
dateLastCrawled?: string;
|
|
||||||
thumbnailUrl?: string;
|
|
||||||
contentUrl?: string;
|
|
||||||
hostPageUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
accentColor?: string;
|
|
||||||
contentSize?: string;
|
|
||||||
datePublished?: string;
|
|
||||||
encodingFormat?: string;
|
|
||||||
hostPageDisplayUrl?: string;
|
|
||||||
id?: string;
|
|
||||||
isLicensed?: boolean;
|
|
||||||
isFamilyFriendly?: boolean;
|
|
||||||
language?: string;
|
|
||||||
mediaUrl?: string;
|
|
||||||
motionThumbnailUrl?: string;
|
|
||||||
publisher?: string;
|
|
||||||
viewCount?: number;
|
|
||||||
webSearchUrl?: string;
|
|
||||||
primaryImageOfPage?: {
|
|
||||||
thumbnailUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
};
|
|
||||||
video?: {
|
|
||||||
allowHttpsEmbed?: boolean;
|
|
||||||
embedHtml?: string;
|
|
||||||
allowMobileEmbed?: boolean;
|
|
||||||
viewCount?: number;
|
|
||||||
duration?: string;
|
|
||||||
};
|
|
||||||
image?: {
|
|
||||||
thumbnail?: {
|
|
||||||
contentUrl?: string;
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
};
|
|
||||||
imageInsightsToken?: string;
|
|
||||||
imageId?: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export const searchBingAPI = async (query: string) => {
|
|
||||||
try {
|
|
||||||
const bingApiKey = await getBingSubscriptionKey();
|
|
||||||
const url = new URL(`https://api.cognitive.microsoft.com/bing/v7.0/search`);
|
|
||||||
url.searchParams.append('q', query);
|
|
||||||
url.searchParams.append('responseFilter', 'Webpages,Images,Videos');
|
|
||||||
|
|
||||||
const res = await axios.get(url.toString(), {
|
|
||||||
headers: {
|
|
||||||
'Ocp-Apim-Subscription-Key': bingApiKey,
|
|
||||||
Accept: 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (res.data.error) {
|
|
||||||
throw new Error(`Bing API Error: ${res.data.error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const originalres = res.data;
|
|
||||||
|
|
||||||
// Extract web, image, and video results
|
|
||||||
const webResults = originalres.webPages?.value || [];
|
|
||||||
const imageResults = originalres.images?.value || [];
|
|
||||||
const videoResults = originalres.videos?.value || [];
|
|
||||||
|
|
||||||
const results = webResults.map((item: BingAPISearchResult) => ({
|
|
||||||
title: item.name,
|
|
||||||
url: item.url,
|
|
||||||
content: item.snippet,
|
|
||||||
img_src:
|
|
||||||
item.primaryImageOfPage?.thumbnailUrl ||
|
|
||||||
imageResults.find((img: any) => img.hostPageUrl === item.url)
|
|
||||||
?.thumbnailUrl ||
|
|
||||||
videoResults.find((vid: any) => vid.hostPageUrl === item.url)
|
|
||||||
?.thumbnailUrl,
|
|
||||||
...(item.video && {
|
|
||||||
videoData: {
|
|
||||||
duration: item.video.duration,
|
|
||||||
embedUrl: item.video.embedHtml?.match(/src="(.*?)"/)?.[1],
|
|
||||||
},
|
|
||||||
publisher: item.publisher,
|
|
||||||
datePublished: item.datePublished,
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
|
|
||||||
return { results, originalres };
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error.response?.data
|
|
||||||
? JSON.stringify(error.response.data, null, 2)
|
|
||||||
: error.message || 'Unknown error';
|
|
||||||
throw new Error(`Bing API Error: ${errorMessage}`);
|
|
||||||
}
|
|
||||||
};
|
|
@@ -1,102 +0,0 @@
-import axios from 'axios';
-import { getBraveApiKey } from '../../config';
-
-interface BraveSearchResult {
-  title: string;
-  url: string;
-  content?: string;
-  img_src?: string;
-  age?: string;
-  family_friendly?: boolean;
-  language?: string;
-  video?: {
-    embedUrl?: string;
-    duration?: string;
-  };
-  rating?: {
-    value: number;
-    scale: number;
-  };
-  products?: Array<{
-    name: string;
-    price?: string;
-  }>;
-  recipe?: {
-    ingredients?: string[];
-    cookTime?: string;
-  };
-  meta?: {
-    fetched?: string;
-    lastCrawled?: string;
-  };
-}
-
-export const searchBraveAPI = async (
-  query: string,
-  numResults: number = 20,
-): Promise<{ results: BraveSearchResult[]; originalres: any }> => {
-  try {
-    const braveApiKey = await getBraveApiKey();
-    const url = new URL(`https://api.search.brave.com/res/v1/web/search`);
-
-    url.searchParams.append('q', query);
-    url.searchParams.append('count', numResults.toString());
-
-    const res = await axios.get(url.toString(), {
-      headers: {
-        'X-Subscription-Token': braveApiKey,
-        Accept: 'application/json',
-      },
-    });
-
-    if (res.data.error) {
-      throw new Error(`Brave API Error: ${res.data.error.message}`);
-    }
-
-    const originalres = res.data;
-    const webResults = originalres.web?.results || [];
-
-    const results: BraveSearchResult[] = webResults.map((item: any) => ({
-      title: item.title,
-      url: item.url,
-      content: item.description,
-      img_src: item.thumbnail?.src || item.deep_results?.images?.[0]?.src,
-      age: item.age,
-      family_friendly: item.family_friendly,
-      language: item.language,
-      video: item.video
-        ? {
-            embedUrl: item.video.embed_url,
-            duration: item.video.duration,
-          }
-        : undefined,
-      rating: item.rating
-        ? {
-            value: item.rating.value,
-            scale: item.rating.scale_max,
-          }
-        : undefined,
-      products: item.deep_results?.product_cluster?.map((p: any) => ({
-        name: p.name,
-        price: p.price,
-      })),
-      recipe: item.recipe
-        ? {
-            ingredients: item.recipe.ingredients,
-            cookTime: item.recipe.cook_time,
-          }
-        : undefined,
-      meta: {
-        fetched: item.meta?.fetched,
-        lastCrawled: item.meta?.last_crawled,
-      },
-    }));
-
-    return { results, originalres };
-  } catch (error) {
-    const errorMessage = error.response?.data
-      ? JSON.stringify(error.response.data, null, 2)
-      : error.message || 'Unknown error';
-    throw new Error(`Brave API Error: ${errorMessage}`);
-  }
-};
|
@@ -1,85 +0,0 @@
-import axios from 'axios';
-import { getGoogleApiKey, getGoogleCseId } from '../../config';
-
-interface GooglePSESearchResult {
-  kind: string;
-  title: string;
-  htmlTitle: string;
-  link: string;
-  displayLink: string;
-  snippet?: string;
-  htmlSnippet?: string;
-  cacheId?: string;
-  formattedUrl: string;
-  htmlFormattedUrl: string;
-  pagemap?: {
-    videoobject: any;
-    cse_thumbnail?: Array<{
-      src: string;
-      width: string;
-      height: string;
-    }>;
-    metatags?: Array<{
-      [key: string]: string;
-      author?: string;
-    }>;
-    cse_image?: Array<{
-      src: string;
-    }>;
-  };
-  fileFormat?: string;
-  image?: {
-    contextLink: string;
-    thumbnailLink: string;
-  };
-  mime?: string;
-  labels?: Array<{
-    name: string;
-    displayName: string;
-  }>;
-}
-
-export const searchGooglePSE = async (query: string) => {
-  try {
-    const [googleApiKey, googleCseID] = await Promise.all([
-      getGoogleApiKey(),
-      getGoogleCseId(),
-    ]);
-
-    const url = new URL(`https://www.googleapis.com/customsearch/v1`);
-    url.searchParams.append('q', query);
-    url.searchParams.append('cx', googleCseID);
-    url.searchParams.append('key', googleApiKey);
-
-    const res = await axios.get(url.toString());
-
-    if (res.data.error) {
-      throw new Error(`Google PSE Error: ${res.data.error.message}`);
-    }
-
-    const originalres = res.data.items;
-
-    const results = originalres.map((item: GooglePSESearchResult) => ({
-      title: item.title,
-      url: item.link,
-      content: item.snippet,
-      img_src:
-        item.pagemap?.cse_image?.[0]?.src ||
-        item.pagemap?.cse_thumbnail?.[0]?.src ||
-        item.image?.thumbnailLink,
-      ...(item.pagemap?.videoobject?.[0] && {
-        videoData: {
-          duration: item.pagemap.videoobject[0].duration,
-          embedUrl: item.pagemap.videoobject[0].embedurl,
-        },
-      }),
-    }));
-
-    return { results, originalres };
-  } catch (error) {
-    const errorMessage = error.response?.data
-      ? JSON.stringify(error.response.data, null, 2)
-      : error.message || 'Unknown error';
-    throw new Error(`Google PSE Error: ${errorMessage}`);
-  }
-};
|
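Usage sketch (not part of the diff): the removed searchGooglePSE(query) resolves the Google API key and CSE ID from config and returns both normalized results and the raw items array, so a caller would look roughly like:

// Hypothetical caller; assumes a valid Google API key and CSE ID are configured.
const logGoogleResults = async (query: string) => {
  const { results, originalres } = await searchGooglePSE(query);
  console.log(`Normalized ${results.length} of ${originalres.length} raw items`);
  for (const r of results) {
    console.log(r.title, r.url, r.img_src ?? 'no image');
  }
};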
@@ -1,79 +0,0 @@
import axios from 'axios';
import { getYacyJsonEndpoint } from '../../config';

interface YaCySearchResult {
  channels: {
    title: string;
    description: string;
    link: string;
    image: {
      url: string;
      title: string;
      link: string;
    };
    startIndex: string;
    itemsPerPage: string;
    searchTerms: string;
    items: {
      title: string;
      link: string;
      code: string;
      description: string;
      pubDate: string;
      image?: string;
      size: string;
      sizename: string;
      guid: string;
      faviconUrl: string;
      host: string;
      path: string;
      file: string;
      urlhash: string;
      ranking: string;
    }[];
    navigation: {
      facetname: string;
      displayname: string;
      type: string;
      min: string;
      max: string;
      mean: string;
      elements: {
        name: string;
        count: string;
        modifier: string;
        url: string;
      }[];
    }[];
  }[];
}

export const searchYaCy = async (query: string, numResults: number = 20) => {
  try {
    const yacyBaseUrl = getYacyJsonEndpoint();

    const url = new URL(`${yacyBaseUrl}/yacysearch.json`);
    url.searchParams.append('query', query);
    url.searchParams.append('count', numResults.toString());

    const res = await axios.get(url.toString());

    const originalres = res.data as YaCySearchResult;

    const results = originalres.channels[0].items.map((item) => ({
      title: item.title,
      url: item.link,
      content: item.description,
      img_src: item.image || null,
      pubDate: item.pubDate,
      host: item.host,
    }));

    return { results, originalres };
  } catch (error) {
    const errorMessage = error.response?.data
      ? JSON.stringify(error.response.data, null, 2)
      : error.message || 'Unknown error';
    throw new Error(`YaCy Error: ${errorMessage}`);
  }
};
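Usage sketch (not part of the diff): the removed searchYaCy(query, numResults = 20) queries the configured YaCy JSON endpoint, so a caller would look roughly like:

// Hypothetical caller; assumes getYacyJsonEndpoint() points at a reachable YaCy instance.
const logYacyResults = async (query: string) => {
  const { results } = await searchYaCy(query, 5);
  for (const r of results) {
    console.log(`${r.title} (${r.host}) -> ${r.url}`);
  }
};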
@@ -1,5 +1,5 @@
import axios from 'axios';
import { getSearxngApiEndpoint } from '../../config';
import { getSearxngApiEndpoint } from '../config';

interface SearxngSearchOptions {
  categories?: string[];
@@ -13,16 +13,6 @@ import {
  getCustomOpenaiApiUrl,
  getCustomOpenaiApiKey,
  getCustomOpenaiModelName,
  getSearchEngineBackend,
  getImageSearchEngineBackend,
  getVideoSearchEngineBackend,
  getNewsSearchEngineBackend,
  getSearxngApiEndpoint,
  getGoogleApiKey,
  getGoogleCseId,
  getBingSubscriptionKey,
  getBraveApiKey,
  getYacyJsonEndpoint,
} from '../config';
import logger from '../utils/logger';

@@ -71,21 +61,6 @@ router.get('/', async (_, res) => {
    config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
    config['customOpenaiModelName'] = getCustomOpenaiModelName();

    // Add search engine configuration
    config['searchEngineBackends'] = {
      search: getSearchEngineBackend(),
      image: getImageSearchEngineBackend(),
      video: getVideoSearchEngineBackend(),
      news: getNewsSearchEngineBackend(),
    };

    config['searxngEndpoint'] = getSearxngApiEndpoint();
    config['googleApiKey'] = getGoogleApiKey();
    config['googleCseId'] = getGoogleCseId();
    config['bingSubscriptionKey'] = getBingSubscriptionKey();
    config['braveApiKey'] = getBraveApiKey();
    config['yacyEndpoint'] = getYacyJsonEndpoint();

    res.status(200).json(config);
  } catch (err: any) {
    res.status(500).json({ message: 'An error has occurred.' });
@@ -119,30 +94,6 @@ router.post('/', async (req, res) => {
          MODEL_NAME: config.customOpenaiModelName,
        },
      },
      SEARCH_ENGINE_BACKENDS: config.searchEngineBackends ? {
        SEARCH: config.searchEngineBackends.search,
        IMAGE: config.searchEngineBackends.image,
        VIDEO: config.searchEngineBackends.video,
        NEWS: config.searchEngineBackends.news,
      } : undefined,
      SEARCH_ENGINES: {
        GOOGLE: {
          API_KEY: config.googleApiKey,
          CSE_ID: config.googleCseId,
        },
        SEARXNG: {
          ENDPOINT: config.searxngEndpoint,
        },
        BING: {
          SUBSCRIPTION_KEY: config.bingSubscriptionKey,
        },
        BRAVE: {
          API_KEY: config.braveApiKey,
        },
        YACY: {
          ENDPOINT: config.yacyEndpoint,
        },
      },
    };

    updateConfig(updatedConfig);
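For reference, the removed mapping above expects a settings payload shaped roughly as follows (the values here are placeholders for illustration, not project defaults):

// Sketch of the config POST body consumed by the removed SEARCH_ENGINE_BACKENDS / SEARCH_ENGINES mapping.
const examplePayload = {
  searchEngineBackends: { search: 'searxng', image: '', video: '', news: 'google' },
  searxngEndpoint: 'http://localhost:8080', // placeholder endpoint
  googleApiKey: '<google-api-key>',
  googleCseId: '<google-cse-id>',
  bingSubscriptionKey: '<bing-subscription-key>',
  braveApiKey: '<brave-api-key>',
  yacyEndpoint: 'http://localhost:8090', // placeholder endpoint
};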
@@ -1,125 +1,42 @@
import express from 'express';
import { searchSearxng } from '../lib/searchEngines/searxng';
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
import { searchBraveAPI } from '../lib/searchEngines/brave';
import { searchYaCy } from '../lib/searchEngines/yacy';
import { searchBingAPI } from '../lib/searchEngines/bing';
import { getNewsSearchEngineBackend } from '../config';
import { searchSearxng } from '../lib/searxng';
import logger from '../utils/logger';

const router = express.Router();

async function performSearch(query: string, site: string) {
  const searchEngine = getNewsSearchEngineBackend();
  switch (searchEngine) {
    case 'google': {
      const googleResult = await searchGooglePSE(query);

      return googleResult.originalres.map((item) => {
        const imageSources = [
          item.pagemap?.cse_image?.[0]?.src,
          item.pagemap?.cse_thumbnail?.[0]?.src,
          item.pagemap?.metatags?.[0]?.['og:image'],
          item.pagemap?.metatags?.[0]?.['twitter:image'],
          item.pagemap?.metatags?.[0]?.['image'],
        ].filter(Boolean); // Remove undefined values

        return {
          title: item.title,
          url: item.link,
          content: item.snippet,
          thumbnail: imageSources[0], // First available image
          img_src: imageSources[0], // Same as thumbnail for consistency
          iframe_src: null,
          author: item.pagemap?.metatags?.[0]?.['og:site_name'] || site,
          publishedDate:
            item.pagemap?.metatags?.[0]?.['article:published_time'],
        };
      });
    }

    case 'searxng': {
      const searxResult = await searchSearxng(query, {
        engines: ['bing news'],
        pageno: 1,
      });
      return searxResult.results;
    }

    case 'brave': {
      const braveResult = await searchBraveAPI(query);
      return braveResult.results.map((item) => ({
        title: item.title,
        url: item.url,
        content: item.content,
        thumbnail: item.img_src,
        img_src: item.img_src,
        iframe_src: null,
        author: item.meta?.fetched || site,
        publishedDate: item.meta?.lastCrawled,
      }));
    }

    case 'yacy': {
      const yacyResult = await searchYaCy(query);
      return yacyResult.results.map((item) => ({
        title: item.title,
        url: item.url,
        content: item.content,
        thumbnail: item.img_src,
        img_src: item.img_src,
        iframe_src: null,
        author: item?.host || site,
        publishedDate: item?.pubDate,
      }));
    }

    case 'bing': {
      const bingResult = await searchBingAPI(query);
      return bingResult.results.map((item) => ({
        title: item.title,
        url: item.url,
        content: item.content,
        thumbnail: item.img_src,
        img_src: item.img_src,
        iframe_src: null,
        author: item?.publisher || site,
        publishedDate: item?.datePublished,
      }));
    }

    default:
      throw new Error(`Unknown search engine ${searchEngine}`);
  }
}

router.get('/', async (req, res) => {
  try {
    const queries = [
      { site: 'businessinsider.com', topic: 'AI' },
      { site: 'www.exchangewire.com', topic: 'AI' },
      { site: 'yahoo.com', topic: 'AI' },
      { site: 'businessinsider.com', topic: 'tech' },
      { site: 'www.exchangewire.com', topic: 'tech' },
      { site: 'yahoo.com', topic: 'tech' },
    ];

    const data = (
      await Promise.all(
        queries.map(async ({ site, topic }) => {
          try {
            const query = `site:${site} ${topic}`;
            return await performSearch(query, site);
          } catch (error) {
            logger.error(`Error searching ${site}: ${error.message}`);
            return [];
          }
        }),
      )
    )
      .flat()
      .sort(() => Math.random() - 0.5)
      .filter((item) => item.title && item.url && item.content);

    const data = (
      await Promise.all([
        searchSearxng('site:businessinsider.com AI', {
          engines: ['bing news'],
          pageno: 1,
        }),
        searchSearxng('site:www.exchangewire.com AI', {
          engines: ['bing news'],
          pageno: 1,
        }),
        searchSearxng('site:yahoo.com AI', {
          engines: ['bing news'],
          pageno: 1,
        }),
        searchSearxng('site:businessinsider.com tech', {
          engines: ['bing news'],
          pageno: 1,
        }),
        searchSearxng('site:www.exchangewire.com tech', {
          engines: ['bing news'],
          pageno: 1,
        }),
        searchSearxng('site:yahoo.com tech', {
          engines: ['bing news'],
          pageno: 1,
        }),
      ])
    )
      .map((result) => result.results)
      .flat()
      .sort(() => Math.random() - 0.5);

    return res.json({ blogs: data });
  } catch (err: any) {
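For reference, every backend branch of the removed performSearch() normalizes its hits to the same item shape before the route shuffles and returns them; roughly:

// Sketch of the normalized news item produced by the removed performSearch(); field optionality is inferred.
type DiscoverNewsItem = {
  title: string;
  url: string;
  content: string;
  thumbnail?: string;
  img_src?: string;
  iframe_src: null;
  author?: string;
  publishedDate?: string;
};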
@@ -17,12 +17,7 @@ import LineListOutputParser from '../lib/outputParsers/listLineOutputParser';
import LineOutputParser from '../lib/outputParsers/lineOutputParser';
import { getDocumentsFromLinks } from '../utils/documents';
import { Document } from 'langchain/document';
import { searchSearxng } from '../lib/searchEngines/searxng';
import { searchGooglePSE } from '../lib/searchEngines/google_pse';
import { searchBingAPI } from '../lib/searchEngines/bing';
import { searchBraveAPI } from '../lib/searchEngines/brave';
import { searchYaCy } from '../lib/searchEngines/yacy';
import { getSearchEngineBackend } from '../config';
import { searchSearxng } from '../lib/searxng';
import path from 'path';
import fs from 'fs';
import computeSimilarity from '../utils/computeSimilarity';
@@ -208,37 +203,10 @@ class MetaSearchAgent implements MetaSearchAgentType {

      return { query: question, docs: docs };
    } else {
      const searchEngine = getSearchEngineBackend();

      let res;
      switch (searchEngine) {
        case 'searxng':
          res = await searchSearxng(question, {
            language: 'en',
            engines: this.config.activeEngines,
          });
          break;
        case 'google':
          res = await searchGooglePSE(question);
          break;
        case 'bing':
          res = await searchBingAPI(question);
          break;
        case 'brave':
          res = await searchBraveAPI(question);
          break;
        case 'yacy':
          res = await searchYaCy(question);
          break;
        default:
          throw new Error(`Unknown search engine ${searchEngine}`);
      }

      if (!res?.results) {
        throw new Error(
          `No results found for search engine: ${searchEngine}`,
        );
      }

      const res = await searchSearxng(question, {
        language: 'en',
        engines: this.config.activeEngines,
      });

      const documents = res.results.map(
        (result) =>
@@ -23,18 +23,6 @@ interface SettingsType {
  customOpenaiApiKey: string;
  customOpenaiApiUrl: string;
  customOpenaiModelName: string;
  searchEngineBackends: {
    search: string;
    image: string;
    video: string;
    news: string;
  };
  searxngEndpoint: string;
  googleApiKey: string;
  googleCseId: string;
  bingSubscriptionKey: string;
  braveApiKey: string;
  yacyEndpoint: string;
}

interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -124,12 +112,6 @@ const Page = () => {
  const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
  const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
  const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
  const [searchEngineBackends, setSearchEngineBackends] = useState({
    search: '',
    image: '',
    video: '',
    news: '',
  });

  useEffect(() => {
    const fetchConfig = async () => {
@@ -143,16 +125,6 @@ const Page = () => {
      const data = (await res.json()) as SettingsType;
      setConfig(data);

      // Set search engine backends if they exist in the response
      if (data.searchEngineBackends) {
        setSearchEngineBackends({
          search: data.searchEngineBackends.search || '',
          image: data.searchEngineBackends.image || '',
          video: data.searchEngineBackends.video || '',
          news: data.searchEngineBackends.news || '',
        });
      }

      const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
      const embeddingModelProvidersKeys = Object.keys(
        data.embeddingModelProviders || {},
@@ -359,8 +331,6 @@ const Page = () => {
        localStorage.setItem('embeddingModelProvider', value);
      } else if (key === 'embeddingModel') {
        localStorage.setItem('embeddingModel', value);
      } else if (key === 'searchEngineBackends') {
        localStorage.setItem('searchEngineBackends', value);
      }
    } catch (err) {
      console.error('Failed to save:', err);
@@ -823,234 +793,6 @@ const Page = () => {
                </div>
              </div>
            </SettingsSection>

            <SettingsSection title="Search Engine Settings">
              <div className="flex flex-col space-y-4">
                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Default Search Engine
                  </p>
                  <Select
                    value={searchEngineBackends.search}
                    onChange={(e) => {
                      const value = e.target.value;
                      setSearchEngineBackends((prev) => ({
                        ...prev,
                        search: value,
                      }));
                      saveConfig('searchEngineBackends', {
                        ...searchEngineBackends,
                        search: value,
                      });
                    }}
                    options={[
                      { value: 'searxng', label: 'SearXNG' },
                      { value: 'google', label: 'Google' },
                      { value: 'bing', label: 'Bing' },
                      { value: 'brave', label: 'Brave' },
                      { value: 'yacy', label: 'YaCy' },
                    ]}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Image Search Engine
                  </p>
                  <Select
                    value={searchEngineBackends.image}
                    onChange={(e) => {
                      const value = e.target.value;
                      setSearchEngineBackends((prev) => ({
                        ...prev,
                        image: value,
                      }));
                      saveConfig('searchEngineBackends', {
                        ...searchEngineBackends,
                        image: value,
                      });
                    }}
                    options={[
                      { value: '', label: 'Use Default Search Engine' },
                      { value: 'searxng', label: 'SearXNG' },
                      { value: 'google', label: 'Google' },
                      { value: 'bing', label: 'Bing' },
                      { value: 'brave', label: 'Brave' },
                    ]}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Video Search Engine
                  </p>
                  <Select
                    value={searchEngineBackends.video}
                    onChange={(e) => {
                      const value = e.target.value;
                      setSearchEngineBackends((prev) => ({
                        ...prev,
                        video: value,
                      }));
                      saveConfig('searchEngineBackends', {
                        ...searchEngineBackends,
                        video: value,
                      });
                    }}
                    options={[
                      { value: '', label: 'Use Default Search Engine' },
                      { value: 'searxng', label: 'SearXNG' },
                      { value: 'google', label: 'Google' },
                      { value: 'bing', label: 'Bing' },
                      { value: 'brave', label: 'Brave' },
                    ]}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    News Search Engine
                  </p>
                  <Select
                    value={searchEngineBackends.news}
                    onChange={(e) => {
                      const value = e.target.value;
                      setSearchEngineBackends((prev) => ({
                        ...prev,
                        news: value,
                      }));
                      saveConfig('searchEngineBackends', {
                        ...searchEngineBackends,
                        news: value,
                      });
                    }}
                    options={[
                      { value: '', label: 'Use Default Search Engine' },
                      { value: 'searxng', label: 'SearXNG' },
                      { value: 'google', label: 'Google' },
                      { value: 'bing', label: 'Bing' },
                      { value: 'brave', label: 'Brave' },
                    ]}
                  />
                </div>

                <div className="pt-4 border-t border-light-200 dark:border-dark-200">
                  <div className="flex flex-col space-y-1">
                    <p className="text-black/70 dark:text-white/70 text-sm">
                      SearXNG Endpoint
                    </p>
                    <Input
                      type="text"
                      placeholder="SearXNG API Endpoint"
                      value={config.searxngEndpoint || ''}
                      isSaving={savingStates['searxngEndpoint']}
                      onChange={(e) => {
                        setConfig((prev) => ({
                          ...prev!,
                          searxngEndpoint: e.target.value,
                        }));
                      }}
                      onSave={(value) => saveConfig('searxngEndpoint', value)}
                    />
                  </div>
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Google API Key
                  </p>
                  <Input
                    type="text"
                    placeholder="Google API Key"
                    value={config.googleApiKey || ''}
                    isSaving={savingStates['googleApiKey']}
                    onChange={(e) => {
                      setConfig((prev) => ({
                        ...prev!,
                        googleApiKey: e.target.value,
                      }));
                    }}
                    onSave={(value) => saveConfig('googleApiKey', value)}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Google CSE ID
                  </p>
                  <Input
                    type="text"
                    placeholder="Google Custom Search Engine ID"
                    value={config.googleCseId || ''}
                    isSaving={savingStates['googleCseId']}
                    onChange={(e) => {
                      setConfig((prev) => ({
                        ...prev!,
                        googleCseId: e.target.value,
                      }));
                    }}
                    onSave={(value) => saveConfig('googleCseId', value)}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Bing Subscription Key
                  </p>
                  <Input
                    type="text"
                    placeholder="Bing Subscription Key"
                    value={config.bingSubscriptionKey || ''}
                    isSaving={savingStates['bingSubscriptionKey']}
                    onChange={(e) => {
                      setConfig((prev) => ({
                        ...prev!,
                        bingSubscriptionKey: e.target.value,
                      }));
                    }}
                    onSave={(value) => saveConfig('bingSubscriptionKey', value)}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    Brave API Key
                  </p>
                  <Input
                    type="text"
                    placeholder="Brave API Key"
                    value={config.braveApiKey || ''}
                    isSaving={savingStates['braveApiKey']}
                    onChange={(e) => {
                      setConfig((prev) => ({
                        ...prev!,
                        braveApiKey: e.target.value,
                      }));
                    }}
                    onSave={(value) => saveConfig('braveApiKey', value)}
                  />
                </div>

                <div className="flex flex-col space-y-1">
                  <p className="text-black/70 dark:text-white/70 text-sm">
                    YaCy Endpoint
                  </p>
                  <Input
                    type="text"
                    placeholder="YaCy API Endpoint"
                    value={config.yacyEndpoint || ''}
                    isSaving={savingStates['yacyEndpoint']}
                    onChange={(e) => {
                      setConfig((prev) => ({
                        ...prev!,
                        yacyEndpoint: e.target.value,
                      }));
                    }}
                    onSave={(value) => saveConfig('yacyEndpoint', value)}
                  />
                </div>
              </div>
            </SettingsSection>
          </div>
        )
      )}
@@ -48,11 +48,17 @@ const Chat = ({
  });

  useEffect(() => {
    const scroll = () => {
      messageEnd.current?.scrollIntoView({ behavior: 'smooth' });
    };

    if (messages.length === 1) {
      document.title = `${messages[0].content.substring(0, 30)} - Perplexica`;
    }

    if (messages[messages.length - 1]?.role == 'user') {
      scroll();
    }
  }, [messages]);

  return (
@@ -368,7 +368,7 @@ const loadMessages = async (

const ChatWindow = ({ id }: { id?: string }) => {
  const searchParams = useSearchParams();
  const initialMessage = searchParams.get('q');
  const initialMessage = searchParams?.get('q');

  const [chatId, setChatId] = useState<string | undefined>(id);
  const [newChatCreated, setNewChatCreated] = useState(false);
@@ -378,7 +378,9 @@ const ChatWindow = ({ id }: { id?: string }) => {

  const [isWSReady, setIsWSReady] = useState(false);
  const ws = useSocket(
    process.env.NEXT_PUBLIC_WS_URL!,
    process.env.NEXT_PUBLIC_WS_URL === 'auto'
      ? `${window.location.protocol === 'https:' ? 'wss:' : 'ws:'}//${window.location.host}/ws`
      : process.env.NEXT_PUBLIC_WS_URL!,
    setIsWSReady,
    setHasError,
  );
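Illustration of the new 'auto' mode above (hostnames are placeholders):

// NEXT_PUBLIC_WS_URL=auto derives the WebSocket URL from the page origin:
//   https://perplexica.example.com  ->  wss://perplexica.example.com/ws
//   http://localhost:3000           ->  ws://localhost:3000/ws
// Any other value of NEXT_PUBLIC_WS_URL is passed to useSocket unchanged.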