Compare commits

..

109 Commits

Author SHA1 Message Date
ItzCrazyKns
956a768a86 feat(app): handle new architecture 2025-11-23 19:58:46 +05:30
ItzCrazyKns
e0ba476ca4 feat(optimization): enable quality 2025-11-23 19:49:54 +05:30
ItzCrazyKns
cba3f43b19 feat(search-agent): add search agent flow 2025-11-23 19:49:36 +05:30
ItzCrazyKns
ec06a2b9ff feat(researcher): use patching, streaming 2025-11-23 19:48:44 +05:30
ItzCrazyKns
1b4e883f57 feat(prompts): add writer prompt 2025-11-23 19:48:12 +05:30
ItzCrazyKns
f15802b688 feat(prompts): update research prompt 2025-11-23 19:48:05 +05:30
ItzCrazyKns
8dec689a45 feat(prompts): update classifier prompt 2025-11-23 19:47:28 +05:30
ItzCrazyKns
730ee0ff41 feat(intents): add private search 2025-11-23 19:47:18 +05:30
ItzCrazyKns
7c9258cfc9 feat(intents): update intent prompt 2025-11-23 19:47:11 +05:30
ItzCrazyKns
4e7143ce0c feat(app): add initial widgets 2025-11-23 19:46:42 +05:30
ItzCrazyKns
d5f62f2dca feat(chat): prevent auto-scroll unless message sent 2025-11-23 19:46:02 +05:30
ItzCrazyKns
b7b280637f feat(providers): update ollama context window, temp 2025-11-23 19:26:47 +05:30
ItzCrazyKns
e22a39fd73 feat(routes): update routes to handle new llm types 2025-11-23 19:24:17 +05:30
ItzCrazyKns
6da6acbcd0 feat(agents): update media agents 2025-11-23 19:23:42 +05:30
ItzCrazyKns
0ac8569a9e feat(agents): update suggestion generator 2025-11-23 19:23:18 +05:30
ItzCrazyKns
74bc08d189 Refactor types and imports for consistency 2025-11-23 19:22:27 +05:30
ItzCrazyKns
d7dd17c069 feat(app): fix type resolving issues 2025-11-23 19:22:11 +05:30
ItzCrazyKns
6d35d60b49 Remove unused output parsers and document utility 2025-11-23 19:21:16 +05:30
ItzCrazyKns
d6c364fdcb feat(models): remove old providers 2025-11-22 22:23:10 +05:30
ItzCrazyKns
8d04f636d0 Delete index.ts 2025-11-22 22:22:43 +05:30
ItzCrazyKns
9ac2da3607 feat(app): remove old search agent 2025-11-22 22:22:34 +05:30
ItzCrazyKns
55cf88822d feat(package): add modules 2025-11-21 23:58:04 +05:30
ItzCrazyKns
c4acc83fd5 feat(agents): add search agent 2025-11-21 23:57:50 +05:30
ItzCrazyKns
08feb18197 feat(search-agent): add researcher, research actions 2025-11-21 23:57:29 +05:30
ItzCrazyKns
0df0114e76 feat(prompts): add researcher prompt 2025-11-21 23:54:30 +05:30
ItzCrazyKns
4016b21bdf Update formatHistory.ts 2025-11-21 23:54:16 +05:30
ItzCrazyKns
f7a43b3cb9 feat(session): use blocks, use rfc6902 for stream with patching 2025-11-21 23:52:55 +05:30
ItzCrazyKns
70bcd8c6f1 feat(types): add artifact to block, add more blocks 2025-11-21 23:51:09 +05:30
ItzCrazyKns
2568088341 feat(db): add new migration files 2025-11-21 23:49:52 +05:30
ItzCrazyKns
a494d4c329 feat(app): fix migration errors 2025-11-21 23:49:27 +05:30
ItzCrazyKns
9b85c63a80 feat(db): migrate schema 2025-11-21 23:49:14 +05:30
ItzCrazyKns
1614cfa5e5 feat(app): add widgets 2025-11-20 14:55:50 +05:30
ItzCrazyKns
036b44611f feat(search): add classifier 2025-11-20 14:55:24 +05:30
ItzCrazyKns
8b515201f3 feat(app): add search types 2025-11-20 14:53:03 +05:30
ItzCrazyKns
cbcb03c7ac feat(llm): update return type to partial 2025-11-20 14:52:41 +05:30
ItzCrazyKns
afc68ca91f feat(ollamaLLM): disable thinking in obj mode 2025-11-20 14:52:24 +05:30
ItzCrazyKns
3cc8882b28 feat(prompts): add classifier prompt 2025-11-20 14:51:49 +05:30
ItzCrazyKns
c3830795cb feat(app): add new session manager 2025-11-20 14:51:17 +05:30
ItzCrazyKns
f44ad973aa feat(types): add llm types 2025-11-18 14:39:43 +05:30
ItzCrazyKns
4bcbdad6cb feat(providers): implement custom classes 2025-11-18 14:39:04 +05:30
ItzCrazyKns
5272c7fd3e feat(models): add new base classes 2025-11-18 14:38:12 +05:30
ItzCrazyKns
657a577ec8 feat(app): enhance UI 2025-11-18 14:37:41 +05:30
ItzCrazyKns
f6dac43d7a feat(types): add message & chunk type 2025-11-18 01:17:19 +05:30
ItzCrazyKns
a00f2231d4 feat(chat-window): remove loading state 2025-11-14 23:17:41 +05:30
ItzCrazyKns
1da9b7655c Merge branch 'canary' into feat/improve-search-architecture 2025-11-14 14:38:58 +05:30
ItzCrazyKns
2edef888a3 Merge branch 'master' into canary 2025-11-14 13:29:22 +05:30
ItzCrazyKns
2dc8078848 Update Exa sponsor image and README styling 2025-11-14 13:23:50 +05:30
ItzCrazyKns
8df81c20cf Update README.md 2025-11-14 13:19:49 +05:30
ItzCrazyKns
34bd02236d Update README.md 2025-11-14 13:17:52 +05:30
ItzCrazyKns
2430376a0c feat(readme): update sponsers 2025-11-14 13:15:59 +05:30
ItzCrazyKns
bd5628b390 feat(package): bump langchain package 2025-11-14 11:45:48 +05:30
ItzCrazyKns
3d5d04eda0 Merge branch 'canary' into feat/improve-search-architecture 2025-11-13 11:54:24 +05:30
ItzCrazyKns
07a17925b1 feat(media-search): supply full history 2025-11-13 11:53:53 +05:30
ItzCrazyKns
3bcf646af1 feat(search-route): handle history processing after llm validation 2025-11-13 11:52:12 +05:30
ItzCrazyKns
e499c0b96e feat(app): migrate video search chain 2025-11-13 11:51:25 +05:30
ItzCrazyKns
33b736e1e8 feat(app): migrate image search chain 2025-11-13 11:51:13 +05:30
Kushagra Srivastava
5e1746f646 Merge pull request #928 from ItzCrazyKns/master
Merge master into canary
2025-11-13 11:49:42 +05:30
ItzCrazyKns
41fe009847 feat(app): migrate suggestion chain 2025-11-13 11:47:28 +05:30
ItzCrazyKns
1a8889c71c feat(app): add new agents directory 2025-11-10 16:45:48 +05:30
ItzCrazyKns
70c1f7230c feat(assets): remove old preview 2025-11-08 21:31:56 +05:30
ItzCrazyKns
c0771095a6 feat(app): lint & beautify 2025-10-30 17:21:48 +05:30
ItzCrazyKns
0856896aff feat(settings): fix text size, enhance UI 2025-10-30 17:21:40 +05:30
ItzCrazyKns
3da53aed03 Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica 2025-10-30 11:36:30 +05:30
ItzCrazyKns
244675759c feat(config): add getAutoMediaSearch, update uses 2025-10-30 11:29:14 +05:30
ItzCrazyKns
ce6a37aaff feat(settingsFields): add switch field 2025-10-30 11:28:15 +05:30
ItzCrazyKns
c3abba8462 feat(settings): separate personalization & preferences 2025-10-29 23:13:51 +05:30
ItzCrazyKns
f709aa8224 feat(config): add new switch config field 2025-10-29 23:12:09 +05:30
Kushagra Srivastava
22695f4ef6 Merge pull request #916 from skoved/gemini-embedding-fix
fix: list all available gemini embedding models
2025-10-28 21:56:44 +05:30
skoved
75ef2e0282 fix: list all available gemini embedding models
the new settings window does not list all available gemini embedding models. this happens because some gemini embedding models have `embedContent` instead of `embedText`
2025-10-28 11:31:41 -04:00
ItzCrazyKns
b0d97c4c83 feat(readme): revert to screenshot for now 2025-10-27 16:49:57 +05:30
ItzCrazyKns
6527388e25 Update demo.gif 2025-10-27 15:27:50 +05:30
ItzCrazyKns
7397e33f29 feat(app): rename providers to connection, enhance UX 2025-10-27 15:08:50 +05:30
ItzCrazyKns
f6ffa9ebe0 feat(readme): enhance readme 2025-10-27 13:09:59 +05:30
ItzCrazyKns
f9e675823b Create demo.gif 2025-10-27 12:57:34 +05:30
ItzCrazyKns
2e736613c5 Merge branch 'master' into canary 2025-10-27 11:43:18 +05:30
ItzCrazyKns
295334b195 feat(app): fix empty message being sent 2025-10-24 23:40:01 +05:30
ItzCrazyKns
b106abd77f feat(package): bump version 2025-10-24 23:00:07 +05:30
ItzCrazyKns
2d80fc400d feat(app): lint & beautify 2025-10-24 22:58:10 +05:30
ItzCrazyKns
097a5c55c6 feat(layout): add everything inside chat provider 2025-10-24 22:57:56 +05:30
ItzCrazyKns
d0719429b4 feat(app): fix issues with model selection 2025-10-24 22:56:23 +05:30
ItzCrazyKns
600d4ceb29 feat(hf-transformer): use langchain's inbuilt transformer class 2025-10-23 23:06:05 +05:30
ItzCrazyKns
4f50462f1d feat(package): bump version 2025-10-23 21:04:33 +05:30
ItzCrazyKns
231bc22a36 feat(docker): update searxng build script 2025-10-23 19:07:22 +05:30
ItzCrazyKns
046daf442a feat(docker): update searxng build script 2025-10-23 19:06:27 +05:30
ItzCrazyKns
cb1d85e458 feat(readme): add volumes 2025-10-21 16:57:57 +05:30
ItzCrazyKns
ce78b4ff62 feat(app): show "add model" button 2025-10-21 16:32:40 +05:30
ItzCrazyKns
88ae67065b feat(config): add measurement unit 2025-10-21 15:59:15 +05:30
ItzCrazyKns
f35d12f94c Update perplexica-screenshot.png 2025-10-21 15:26:29 +05:30
ItzCrazyKns
3d17975d83 feat(model-select): use values from localStorage 2025-10-21 15:25:38 +05:30
Kushagra Srivastava
950717e0cf Delete app.dockerfile 2025-10-21 15:13:17 +05:30
Kushagra Srivastava
4f39b5746a Merge pull request #906 from ItzCrazyKns/canary
Release v1.11.0
2025-10-21 15:07:55 +05:30
ItzCrazyKns
a01fce4e64 feat(package): bump version 2025-10-21 15:03:47 +05:30
ItzCrazyKns
92ff47110d feat(app): rename standalone to slim 2025-10-21 15:03:15 +05:30
ItzCrazyKns
82efd35b55 feat(setup-config): only allow finalization when chat model exists 2025-10-21 14:24:44 +05:30
ItzCrazyKns
3d950bac07 feat(app): update documentation 2025-10-21 13:44:07 +05:30
ItzCrazyKns
77672003ff feat(app): remove docker compose, build standalone images 2025-10-21 13:43:55 +05:30
ItzCrazyKns
e9bd2a8032 feat(settingsButtonMobile): add size 2025-10-21 12:30:34 +05:30
ItzCrazyKns
49fed3e228 feat(setup-config): add model selection state 2025-10-21 12:23:35 +05:30
ItzCrazyKns
7fb7fb9692 feat(app): fix sizes & placement for smaller screens 2025-10-21 12:23:18 +05:30
ItzCrazyKns
ff37225253 feat(models-section): allow selecting chat model 2025-10-21 12:22:37 +05:30
ItzCrazyKns
3b745868b2 feat(app): add mobile settings button 2025-10-21 12:22:22 +05:30
ItzCrazyKns
c945bf1fc3 feat(settings): add textarea type, add systemInstructions 2025-10-21 12:22:06 +05:30
ItzCrazyKns
672fc3c3a8 feat(app): fix build errors 2025-10-20 16:39:38 +05:30
ItzCrazyKns
67c2672f39 feat(searxng): use fetch instead of axios 2025-10-20 16:36:15 +05:30
ItzCrazyKns
334326744c feat(app): use new packages, fix types 2025-10-20 16:36:04 +05:30
ItzCrazyKns
042ce33cf4 feat(providers): add rest of the providers 2025-10-20 16:35:44 +05:30
ItzCrazyKns
22b9a48b26 feat(config): use provider name without number on load from env 2025-10-20 16:35:12 +05:30
ItzCrazyKns
e024d46971 feat(chat): fix typo 2025-10-20 16:34:49 +05:30
ItzCrazyKns
af36f15f3b feat(package): update packages 2025-10-20 16:33:56 +05:30
122 changed files with 6876 additions and 2876 deletions

BIN
.assets/demo.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 31 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 MiB

After

Width:  |  Height:  |  Size: 2.1 MiB

BIN
.assets/sponsers/exa.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.5 KiB

BIN
.assets/sponsers/warp.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 433 KiB

View File

@@ -11,6 +11,13 @@ on:
jobs: jobs:
build-amd64: build-amd64:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
matrix:
variant:
- name: full
dockerfile: Dockerfile
- name: slim
dockerfile: Dockerfile.slim
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v3 uses: actions/checkout@v3
@@ -31,47 +38,54 @@ jobs:
id: version id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Build and push AMD64 Docker image - name: Build and push AMD64 Docker image (master)
if: github.ref == 'refs/heads/master' && github.event_name == 'push' if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/amd64 \ docker buildx build --platform linux/amd64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:amd64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-amd64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:amd64 \ -t itzcrazykns1337/perplexica:${VARIANT}-amd64 \
--push . --push .
- name: Build and push AMD64 Canary Docker image - name: Build and push AMD64 Canary Docker image
if: github.ref == 'refs/heads/canary' && github.event_name == 'push' if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/amd64 \ docker buildx build --platform linux/amd64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-canary-amd64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \ -t itzcrazykns1337/perplexica:${VARIANT}-canary-amd64 \
--push . --push .
- name: Build and push AMD64 release Docker image - name: Build and push AMD64 release Docker image
if: github.event_name == 'release' if: github.event_name == 'release'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/amd64 \ docker buildx build --platform linux/amd64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-amd64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \ -t itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-amd64 \
--push . --push .
build-arm64: build-arm64:
runs-on: ubuntu-24.04-arm runs-on: ubuntu-24.04-arm
strategy:
matrix:
variant:
- name: full
dockerfile: Dockerfile
- name: slim
dockerfile: Dockerfile.slim
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v3 uses: actions/checkout@v3
@@ -92,48 +106,51 @@ jobs:
id: version id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Build and push ARM64 Docker image - name: Build and push ARM64 Docker image (master)
if: github.ref == 'refs/heads/master' && github.event_name == 'push' if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/arm64 \ docker buildx build --platform linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:arm64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-arm64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:arm64 \ -t itzcrazykns1337/perplexica:${VARIANT}-arm64 \
--push . --push .
- name: Build and push ARM64 Canary Docker image - name: Build and push ARM64 Canary Docker image
if: github.ref == 'refs/heads/canary' && github.event_name == 'push' if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/arm64 \ docker buildx build --platform linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-canary-arm64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \ -t itzcrazykns1337/perplexica:${VARIANT}-canary-arm64 \
--push . --push .
- name: Build and push ARM64 release Docker image - name: Build and push ARM64 release Docker image
if: github.event_name == 'release' if: github.event_name == 'release'
run: | run: |
DOCKERFILE=app.dockerfile DOCKERFILE=${{ matrix.variant.dockerfile }}
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant.name }}
docker buildx build --platform linux/arm64 \ docker buildx build --platform linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64 \ --cache-from=type=registry,ref=itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-arm64 \
--cache-to=type=inline \ --cache-to=type=inline \
--provenance false \ --provenance false \
-f $DOCKERFILE \ -f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64 \ -t itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-arm64 \
--push . --push .
manifest: manifest:
needs: [build-amd64, build-arm64] needs: [build-amd64, build-arm64]
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
matrix:
variant: [full, slim]
steps: steps:
- name: Log in to DockerHub - name: Log in to DockerHub
uses: docker/login-action@v2 uses: docker/login-action@v2
@@ -146,29 +163,55 @@ jobs:
id: version id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Create and push multi-arch manifest for main - name: Create and push manifest for main
if: github.ref == 'refs/heads/master' && github.event_name == 'push' if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: | run: |
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant }}
docker manifest create itzcrazykns1337/${IMAGE_NAME}:main \ docker manifest create itzcrazykns1337/perplexica:${VARIANT}-latest \
--amend itzcrazykns1337/${IMAGE_NAME}:amd64 \ --amend itzcrazykns1337/perplexica:${VARIANT}-amd64 \
--amend itzcrazykns1337/${IMAGE_NAME}:arm64 --amend itzcrazykns1337/perplexica:${VARIANT}-arm64
docker manifest push itzcrazykns1337/${IMAGE_NAME}:main docker manifest push itzcrazykns1337/perplexica:${VARIANT}-latest
- name: Create and push multi-arch manifest for canary if [ "$VARIANT" = "full" ]; then
docker manifest create itzcrazykns1337/perplexica:latest \
--amend itzcrazykns1337/perplexica:${VARIANT}-amd64 \
--amend itzcrazykns1337/perplexica:${VARIANT}-arm64
docker manifest push itzcrazykns1337/perplexica:latest
docker manifest create itzcrazykns1337/perplexica:main \
--amend itzcrazykns1337/perplexica:${VARIANT}-amd64 \
--amend itzcrazykns1337/perplexica:${VARIANT}-arm64
docker manifest push itzcrazykns1337/perplexica:main
fi
- name: Create and push manifest for canary
if: github.ref == 'refs/heads/canary' && github.event_name == 'push' if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
run: | run: |
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant }}
docker manifest create itzcrazykns1337/${IMAGE_NAME}:canary \ docker manifest create itzcrazykns1337/perplexica:${VARIANT}-canary \
--amend itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \ --amend itzcrazykns1337/perplexica:${VARIANT}-canary-amd64 \
--amend itzcrazykns1337/${IMAGE_NAME}:canary-arm64 --amend itzcrazykns1337/perplexica:${VARIANT}-canary-arm64
docker manifest push itzcrazykns1337/${IMAGE_NAME}:canary docker manifest push itzcrazykns1337/perplexica:${VARIANT}-canary
- name: Create and push multi-arch manifest for releases if [ "$VARIANT" = "full" ]; then
docker manifest create itzcrazykns1337/perplexica:canary \
--amend itzcrazykns1337/perplexica:${VARIANT}-canary-amd64 \
--amend itzcrazykns1337/perplexica:${VARIANT}-canary-arm64
docker manifest push itzcrazykns1337/perplexica:canary
fi
- name: Create and push manifest for releases
if: github.event_name == 'release' if: github.event_name == 'release'
run: | run: |
IMAGE_NAME=perplexica VARIANT=${{ matrix.variant }}
docker manifest create itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \ docker manifest create itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }} \
--amend itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-amd64 \ --amend itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-amd64 \
--amend itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }}-arm64 --amend itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-arm64
docker manifest push itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} docker manifest push itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}
if [ "$VARIANT" = "full" ]; then
docker manifest create itzcrazykns1337/perplexica:${{ env.RELEASE_VERSION }} \
--amend itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-amd64 \
--amend itzcrazykns1337/perplexica:${VARIANT}-${{ env.RELEASE_VERSION }}-arm64
docker manifest push itzcrazykns1337/perplexica:${{ env.RELEASE_VERSION }}
fi

74
Dockerfile Normal file
View File

@@ -0,0 +1,74 @@
FROM node:24.5.0-slim AS builder
RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
WORKDIR /home/perplexica
COPY package.json yarn.lock ./
RUN yarn install --frozen-lockfile --network-timeout 600000
COPY tsconfig.json next.config.mjs next-env.d.ts postcss.config.js drizzle.config.ts tailwind.config.ts ./
COPY src ./src
COPY public ./public
COPY drizzle ./drizzle
RUN mkdir -p /home/perplexica/data
RUN yarn build
FROM node:24.5.0-slim
RUN apt-get update && apt-get install -y \
python3-dev python3-babel python3-venv python-is-python3 \
uwsgi uwsgi-plugin-python3 \
git build-essential libxslt-dev zlib1g-dev libffi-dev libssl-dev \
curl sudo \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /home/perplexica
COPY --from=builder /home/perplexica/public ./public
COPY --from=builder /home/perplexica/.next/static ./public/_next/static
COPY --from=builder /home/perplexica/.next/standalone ./
COPY --from=builder /home/perplexica/data ./data
COPY drizzle ./drizzle
RUN mkdir /home/perplexica/uploads
RUN useradd --shell /bin/bash --system \
--home-dir "/usr/local/searxng" \
--comment 'Privacy-respecting metasearch engine' \
searxng
RUN mkdir "/usr/local/searxng"
RUN mkdir -p /etc/searxng
RUN chown -R "searxng:searxng" "/usr/local/searxng"
COPY searxng/settings.yml /etc/searxng/settings.yml
COPY searxng/limiter.toml /etc/searxng/limiter.toml
COPY searxng/uwsgi.ini /etc/searxng/uwsgi.ini
RUN chown -R searxng:searxng /etc/searxng
USER searxng
RUN git clone "https://github.com/searxng/searxng" \
"/usr/local/searxng/searxng-src"
RUN python3 -m venv "/usr/local/searxng/searx-pyenv"
RUN "/usr/local/searxng/searx-pyenv/bin/pip" install --upgrade pip setuptools wheel pyyaml msgspec
RUN cd "/usr/local/searxng/searxng-src" && \
"/usr/local/searxng/searx-pyenv/bin/pip" install --use-pep517 --no-build-isolation -e .
USER root
WORKDIR /home/perplexica
COPY entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh
RUN sed -i 's/\r$//' ./entrypoint.sh || true
RUN echo "searxng ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
EXPOSE 3000 8080
ENV SEARXNG_API_URL=http://localhost:8080
CMD ["/home/perplexica/entrypoint.sh"]

View File

@@ -30,8 +30,6 @@ COPY drizzle ./drizzle
RUN mkdir /home/perplexica/uploads RUN mkdir /home/perplexica/uploads
COPY entrypoint.sh ./entrypoint.sh EXPOSE 3000
RUN chmod +x ./entrypoint.sh
RUN sed -i 's/\r$//' ./entrypoint.sh || true
CMD ["/home/perplexica/entrypoint.sh"] CMD ["node", "server.js"]

206
README.md
View File

@@ -1,74 +1,76 @@
# 🚀 Perplexica - An AI-powered search engine 🔎 <!-- omit in toc --> # Perplexica 🔍
<div align="center" markdown="1">
<sup>Special thanks to:</sup>
<br>
<br>
<a href="https://www.warp.dev/perplexica">
<img alt="Warp sponsorship" width="400" src="https://github.com/user-attachments/assets/775dd593-9b5f-40f1-bf48-479faff4c27b">
</a>
### [Warp, the AI Devtool that lives in your terminal](https://www.warp.dev/perplexica)
[Available for MacOS, Linux, & Windows](https://www.warp.dev/perplexica)
</div>
<hr/>
[![GitHub Repo stars](https://img.shields.io/github/stars/ItzCrazyKns/Perplexica?style=social)](https://github.com/ItzCrazyKns/Perplexica/stargazers)
[![GitHub forks](https://img.shields.io/github/forks/ItzCrazyKns/Perplexica?style=social)](https://github.com/ItzCrazyKns/Perplexica/network/members)
[![GitHub watchers](https://img.shields.io/github/watchers/ItzCrazyKns/Perplexica?style=social)](https://github.com/ItzCrazyKns/Perplexica/watchers)
[![Docker Pulls](https://img.shields.io/docker/pulls/itzcrazykns1337/perplexica?color=blue)](https://hub.docker.com/r/itzcrazykns1337/perplexica)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/ItzCrazyKns/Perplexica/blob/master/LICENSE)
[![GitHub last commit](https://img.shields.io/github/last-commit/ItzCrazyKns/Perplexica?color=green)](https://github.com/ItzCrazyKns/Perplexica/commits/master)
[![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT) [![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT)
![preview](.assets/perplexica-screenshot.png?) Perplexica is a **privacy-focused AI answering engine** that runs entirely on your own hardware. It combines knowledge from the vast internet with support for **local LLMs** (Ollama) and cloud providers (OpenAI, Claude, Groq), delivering accurate answers with **cited sources** while keeping your searches completely private.
## Table of Contents <!-- omit in toc --> ![preview](.assets/perplexica-screenshot.png)
- [Overview](#overview)
- [Preview](#preview)
- [Features](#features)
- [Installation](#installation)
- [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
- [Non-Docker Installation](#non-docker-installation)
- [Ollama Connection Errors](#ollama-connection-errors)
- [Lemonade Connection Errors](#lemonade-connection-errors)
- [Using as a Search Engine](#using-as-a-search-engine)
- [Using Perplexica's API](#using-perplexicas-api)
- [Expose Perplexica to a network](#expose-perplexica-to-network)
- [One-Click Deployment](#one-click-deployment)
- [Upcoming Features](#upcoming-features)
- [Support Us](#support-us)
- [Donations](#donations)
- [Contribution](#contribution)
- [Help and Support](#help-and-support)
## Overview
Perplexica is an open-source AI-powered searching tool or an AI-powered search engine that goes deep into the internet to find answers. Inspired by Perplexity AI, it's an open-source option that not just searches the web but understands your questions. It uses advanced machine learning algorithms like similarity searching and embeddings to refine results and provides clear answers with sources cited.
Using SearxNG to stay current and fully open source, Perplexica ensures you always get the most up-to-date information without compromising your privacy.
Want to know more about its architecture and how it works? You can read it [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/README.md). Want to know more about its architecture and how it works? You can read it [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/README.md).
## Preview ## ✨ Features
![video-preview](.assets/perplexica-preview.gif) 🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, and more. Mix and match models based on your needs.
## Features **Smart search modes** - Choose Balanced Mode for everyday searches, Fast Mode when you need quick answers, or wait for Quality Mode (coming soon) for deep research.
- **Local LLMs**: You can utilize local LLMs such as Qwen, DeepSeek, Llama, and Mistral. 🎯 **Six specialized focus modes** - Get better results with modes designed for specific tasks: Academic papers, YouTube videos, Reddit discussions, Wolfram Alpha calculations, writing assistance, or general web search.
- **Two Main Modes:**
- **Copilot Mode:** (In development) Boosts search by generating different queries to find more relevant internet sources. Like normal search instead of just using the context by SearxNG, it visits the top matches and tries to find relevant sources to the user's query directly from the page.
- **Normal Mode:** Processes your query and performs a web search.
- **Focus Modes:** Special modes to better answer specific types of questions. Perplexica currently has 6 focus modes:
- **All Mode:** Searches the entire web to find the best results.
- **Writing Assistant Mode:** Helpful for writing tasks that do not require searching the web.
- **Academic Search Mode:** Finds articles and papers, ideal for academic research.
- **YouTube Search Mode:** Finds YouTube videos based on the search query.
- **Wolfram Alpha Search Mode:** Answers queries that need calculations or data analysis using Wolfram Alpha.
- **Reddit Search Mode:** Searches Reddit for discussions and opinions related to the query.
- **Current Information:** Some search tools might give you outdated info because they use data from crawling bots and convert them into embeddings and store them in a index. Unlike them, Perplexica uses SearxNG, a metasearch engine to get the results and rerank and get the most relevant source out of it, ensuring you always get the latest information without the overhead of daily data updates.
- **API**: Integrate Perplexica into your existing applications and make use of its capibilities.
It has many more features like image and video search. Some of the planned features are mentioned in [upcoming features](#upcoming-features). 🔍 **Web search powered by SearxNG** - Access multiple search engines while keeping your identity private. Support for Tavily and Exa coming soon for even better results.
📷 **Image and video search** - Find visual content alongside text results. Search isn't limited to just articles anymore.
📄 **File uploads** - Upload documents and ask questions about them. PDFs, text files, images - Perplexica understands them all.
🌐 **Search specific domains** - Limit your search to specific websites when you know where to look. Perfect for technical documentation or research papers.
💡 **Smart suggestions** - Get intelligent search suggestions as you type, helping you formulate better queries.
📚 **Discover** - Browse interesting articles and trending content throughout the day. Stay informed without even searching.
🕒 **Search history** - Every search is saved locally so you can revisit your discoveries anytime. Your research is never lost.
**More coming soon** - We're actively developing new features based on community feedback. Join our Discord to help shape Perplexica's future!
## Sponsors
Perplexica's development is powered by the generous support of our sponsors. Their contributions help keep this project free, open-source, and accessible to everyone.
<div align="center">
<a href="https://www.warp.dev/perplexica">
<img alt="Warp Terminal" src=".assets/sponsers/warp.png" width="100%">
</a>
### **✨ [Try Warp - The AI-Powered Terminal →](https://www.warp.dev/perplexica)**
Warp is revolutionizing development workflows with AI-powered features, modern UX, and blazing-fast performance. Used by developers at top companies worldwide.
</div>
---
We'd also like to thank the following partners for their generous support:
<table>
<tr>
<td>
<a href="https://dashboard.exa.ai" target="_blank">
<img src=".assets/sponsers/exa.png" alt="Exa" style="max-width: 8rem; max-height: 8rem; border-radius: .75rem;" />
</a>
</td>
<td>
<a href="https://dashboard.exa.ai">Exa</a> • The Perfect Web Search API for LLMs - web search, crawling, deep research, and answer APIs
</td>
</tr>
</table>
## Installation ## Installation
@@ -76,6 +78,35 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
### Getting Started with Docker (Recommended)
Perplexica can be easily run using Docker. Simply run the following command:
```bash
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
```
This will pull and start the Perplexica container with the bundled SearxNG search engine. Once running, open your browser and navigate to http://localhost:3000. You can then configure your settings (API keys, models, etc.) directly in the setup screen.
**Note**: The image includes both Perplexica and SearxNG, so no additional setup is required. The `-v` flags create persistent volumes for your data and uploaded files.
#### Using Perplexica with Your Own SearxNG Instance
If you already have SearxNG running, you can use the slim version of Perplexica:
```bash
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
```
**Important**: Make sure your SearxNG instance has:
- JSON format enabled in the settings
- Wolfram Alpha search engine enabled
Replace `http://your-searxng-url:8080` with your actual SearxNG URL. Then configure your AI provider settings in the setup screen at http://localhost:3000.
#### Advanced Setup (Building from Source)
If you prefer to build from source or need more control:
1. Ensure Docker is installed and running on your system.
2. Clone the Perplexica repository:
@@ -85,39 +116,46 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
3. After cloning, navigate to the directory containing the project files. 3. After cloning, navigate to the directory containing the project files.
4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields: 4. Build and run using Docker:
- `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
- `CUSTOM_OPENAI`: Your OpenAI-API-compliant local server URL, model name, and API key. You should run your local server with host set to `0.0.0.0`, take note of which port number it is running on, and then use that port number to set `API_URL = http://host.docker.internal:PORT_NUMBER`. You must specify the model name, such as `MODEL_NAME = "unsloth/DeepSeek-R1-0528-Qwen3-8B-GGUF:Q4_K_XL"`. Finally, set `API_KEY` to the appropriate value. If you have not defined an API key, just put anything you want in-between the quotation marks: `API_KEY = "whatever-you-want-but-not-blank"` **You only need to configure these settings if you want to use a local OpenAI-compliant server, such as Llama.cpp's [`llama-server`](https://github.com/ggml-org/llama.cpp/blob/master/tools/server/README.md)**.
- `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
- `LEMONADE`: Your Lemonade API URL. Since Lemonade runs directly on your local machine (not in Docker), you should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Lemonade on port 8000, use `http://host.docker.internal:8000`. For other ports, adjust accordingly. **You need to fill this if you wish to use Lemonade's models**.
- `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
- `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
- `GEMINI`: Your Gemini API key. **You only need to fill this if you wish to use Google's models**.
- `DEEPSEEK`: Your Deepseek API key. **Only needed if you want Deepseek models.**
- `AIMLAPI`: Your AI/ML API key. **Only needed if you want to use AI/ML API models and embeddings.**
**Note**: You can change these after starting Perplexica from the settings dialog.
- `SIMILARITY_MEASURE`: The similarity measure to use (This is filled by default; you can leave it as is if you are unsure about it.)
5. Ensure you are in the directory containing the `docker-compose.yaml` file and execute:
```bash ```bash
docker compose up -d docker build -t perplexica .
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica perplexica
``` ```
6. Wait a few minutes for the setup to complete. You can access Perplexica at http://localhost:3000 in your web browser. 5. Access Perplexica at http://localhost:3000 and configure your settings in the setup screen.
**Note**: After the containers are built, you can start Perplexica directly from Docker without having to open a terminal.
### Non-Docker Installation
1. Install SearXNG and allow `JSON` format in the SearXNG settings. Make sure Wolfram Alpha search engine is also enabled.
2. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. 2. Clone the repository:
3. After populating the configuration run `npm i`.
4. Install the dependencies and then execute `npm run build`. ```bash
5. Finally, start the app by running `npm run start` git clone https://github.com/ItzCrazyKns/Perplexica.git
cd Perplexica
```
3. Install dependencies:
```bash
npm i
```
4. Build the application:
```bash
npm run build
```
5. Start the application:
```bash
npm run start
```
6. Open your browser and navigate to http://localhost:3000 to complete the setup and configure your settings (API keys, models, SearxNG URL, etc.) in the setup screen.
**Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.

View File

@@ -1,35 +1,15 @@
# Perplexica deployment using the pre-built all-in-one image.
# The image bundles SearxNG, so no separate search container,
# custom network, or environment configuration is required.
services:
  perplexica:
    image: itzcrazykns1337/perplexica:latest
    ports:
      - '3000:3000'
    volumes:
      # Database/settings and user uploads persist across container recreation.
      - data:/home/perplexica/data
      - uploads:/home/perplexica/uploads
    restart: unless-stopped

volumes:
  data:
    name: 'perplexica-data'
  uploads:
    name: 'perplexica-uploads'

View File

@@ -4,11 +4,56 @@
Perplexica's Search API makes it easy to use our AI-powered search engine. You can run different types of searches, pick the models you want to use, and get the most recent info. Follow the following headings to learn more about Perplexica's search API.
## Endpoints
### **POST** `http://localhost:3000/api/search` ### Get Available Providers and Models
**Note**: Replace `3000` with any other port if you've changed the default PORT Before making search requests, you'll need to get the available providers and their models.
#### **GET** `/api/providers`
**Full URL**: `http://localhost:3000/api/providers`
Returns a list of all active providers with their available chat and embedding models.
**Response Example:**
```json
{
"providers": [
{
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "OpenAI",
"chatModels": [
{
"name": "GPT 4 Omni Mini",
"key": "gpt-4o-mini"
},
{
"name": "GPT 4 Omni",
"key": "gpt-4o"
}
],
"embeddingModels": [
{
"name": "Text Embedding 3 Large",
"key": "text-embedding-3-large"
}
]
}
]
}
```
Use the `id` field as the `providerId` and the `key` field from the models arrays when making search requests.
### Search Query
#### **POST** `/api/search`
**Full URL**: `http://localhost:3000/api/search`
**Note**: Replace `localhost:3000` with your Perplexica instance URL if running on a different host or port
### Request
@@ -19,12 +64,12 @@ The API accepts a JSON object in the request body, where you define the focus mo
```json ```json
{ {
"chatModel": { "chatModel": {
"provider": "openai", "providerId": "550e8400-e29b-41d4-a716-446655440000",
"name": "gpt-4o-mini" "key": "gpt-4o-mini"
}, },
"embeddingModel": { "embeddingModel": {
"provider": "openai", "providerId": "550e8400-e29b-41d4-a716-446655440000",
"name": "text-embedding-3-large" "key": "text-embedding-3-large"
}, },
"optimizationMode": "speed", "optimizationMode": "speed",
"focusMode": "webSearch", "focusMode": "webSearch",
@@ -38,20 +83,19 @@ The API accepts a JSON object in the request body, where you define the focus mo
} }
``` ```
**Note**: The `providerId` must be a valid UUID obtained from the `/api/providers` endpoint. The example above uses a sample UUID for demonstration.
### Request Parameters
- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details you can send a GET request at `http://localhost:3000/api/models`. Make sure to use the key value (For example "gpt-4o-mini" instead of the display name "GPT 4 omni mini"). - **`chatModel`** (object, optional): Defines the chat model to be used for the query. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
- `provider`: Specifies the provider for the chat model (e.g., `openai`, `ollama`). - `providerId` (string): The UUID of the provider. You can get this from the `/api/providers` endpoint response.
- `name`: The specific model from the chosen provider (e.g., `gpt-4o-mini`). - `key` (string): The model key/identifier (e.g., `gpt-4o-mini`, `llama3.1:latest`). Use the `key` value from the provider's `chatModels` array, not the display name.
- Optional fields for custom OpenAI configuration:
- `customOpenAIBaseURL`: If youre using a custom OpenAI instance, provide the base URL.
- `customOpenAIKey`: The API key for a custom OpenAI instance.
- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details you can send a GET request at `http://localhost:3000/api/models`. Make sure to use the key value (For example "text-embedding-3-large" instead of the display name "Text Embedding 3 Large"). - **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
- `provider`: The provider for the embedding model (e.g., `openai`). - `providerId` (string): The UUID of the embedding provider. You can get this from the `/api/providers` endpoint response.
- `name`: The specific embedding model (e.g., `text-embedding-3-large`). - `key` (string): The embedding model key (e.g., `text-embedding-3-large`, `nomic-embed-text`). Use the `key` value from the provider's `embeddingModels` array, not the display name.
- **`focusMode`** (string, required): Specifies which focus mode to use. Available modes: - **`focusMode`** (string, required): Specifies which focus mode to use. Available modes:
@@ -108,7 +152,7 @@ The response from the API includes both the final message and the sources used t
#### Streaming Response (stream: true) #### Streaming Response (stream: true)
When streaming is enabled, the API returns a stream of newline-delimited JSON objects using Server-Sent Events (SSE). Each line contains a complete, valid JSON object. The response has `Content-Type: text/event-stream`.
Example of streamed response objects:

View File

@@ -2,45 +2,80 @@
To update Perplexica to the latest version, follow these steps:
## For Docker users (Using pre-built images)
1. Clone the latest version of Perplexica from GitHub: Simply pull the latest image and restart your container:
```bash ```bash
git clone https://github.com/ItzCrazyKns/Perplexica.git docker pull itzcrazykns1337/perplexica:latest
docker stop perplexica
docker rm perplexica
docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
``` ```
2. Navigate to the project directory. For slim version:
3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
4. Pull the latest images from the registry.
```bash ```bash
docker compose pull docker pull itzcrazykns1337/perplexica:slim-latest
docker stop perplexica
docker rm perplexica
docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
``` ```
5. Update and recreate the containers. Once updated, go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.
## For Docker users (Building from source)
1. Navigate to your Perplexica directory and pull the latest changes:
```bash ```bash
docker compose up -d cd Perplexica
git pull origin master
``` ```
6. Once the command completes, go to http://localhost:3000 and verify the latest changes. 2. Rebuild the Docker image:
```bash
docker build -t perplexica .
```
3. Stop and remove the old container, then start the new one:
```bash
docker stop perplexica
docker rm perplexica
docker run -p 3000:3000 -p 8080:8080 --name perplexica perplexica
```
4. Once the command completes, go to http://localhost:3000 and verify the latest changes.
## For non-Docker users
1. Clone the latest version of Perplexica from GitHub: 1. Navigate to your Perplexica directory and pull the latest changes:
```bash ```bash
git clone https://github.com/ItzCrazyKns/Perplexica.git cd Perplexica
git pull origin master
``` ```
2. Navigate to the project directory. 2. Install any new dependencies:
3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly. ```bash
4. After populating the configuration run `npm i`. npm i
5. Install the dependencies and then execute `npm run build`. ```
6. Finally, start the app by running `npm run start`
3. Rebuild the application:
```bash
npm run build
```
4. Restart the application:
```bash
npm run start
```
5. Go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.
---

View File

@@ -0,0 +1,15 @@
-- Migration: rebuild the `messages` table for the new message architecture.
-- SQLite cannot change a table's column set in place, so a replacement
-- table is created, the old table dropped, and the new one renamed in.
-- Foreign keys are disabled for the duration of the swap so the
-- DROP/RENAME sequence does not trip constraint checks.
PRAGMA foreign_keys=OFF;--> statement-breakpoint
-- New shape: each row stores the user's query together with the streamed
-- response blocks (JSON array text, default empty) and a lifecycle status
-- (defaults to 'answering').
CREATE TABLE `__new_messages` (
`id` integer PRIMARY KEY NOT NULL,
`messageId` text NOT NULL,
`chatId` text NOT NULL,
`backendId` text NOT NULL,
`query` text NOT NULL,
`createdAt` text NOT NULL,
`responseBlocks` text DEFAULT '[]',
`status` text DEFAULT 'answering'
);
--> statement-breakpoint
-- NOTE(review): destructive — existing message rows are dropped, not
-- migrated into the new schema.
DROP TABLE `messages`;--> statement-breakpoint
ALTER TABLE `__new_messages` RENAME TO `messages`;--> statement-breakpoint
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,132 @@
{
"version": "6",
"dialect": "sqlite",
"id": "1c5eb804-d6b4-48ec-9a8f-75fb729c8e52",
"prevId": "6dedf55f-0e44-478f-82cf-14a21ac686f8",
"tables": {
"chats": {
"name": "chats",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"createdAt": {
"name": "createdAt",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"focusMode": {
"name": "focusMode",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"files": {
"name": "files",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'[]'"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"messages": {
"name": "messages",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"messageId": {
"name": "messageId",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"chatId": {
"name": "chatId",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"backendId": {
"name": "backendId",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"query": {
"name": "query",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"createdAt": {
"name": "createdAt",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"responseBlocks": {
"name": "responseBlocks",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'[]'"
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'answering'"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View File

@@ -15,6 +15,13 @@
"when": 1758863991284, "when": 1758863991284,
"tag": "0001_wise_rockslide", "tag": "0001_wise_rockslide",
"breakpoints": true "breakpoints": true
},
{
"idx": 2,
"version": "6",
"when": 1763732708332,
"tag": "0002_daffy_wrecker",
"breakpoints": true
} }
] ]
} }

View File

@@ -1,4 +1,32 @@
#!/bin/sh
# Container entrypoint: start the bundled SearxNG instance in the
# background, wait (best-effort) until it responds, then hand the
# process over to the Perplexica Node server.
set -e

echo "Starting SearXNG..."
# Run SearxNG as its dedicated user via flask's dev server on :8080.
sudo -H -u searxng bash -c "cd /usr/local/searxng/searxng-src && export SEARXNG_SETTINGS_PATH='/etc/searxng/settings.yml' && export FLASK_APP=searx/webapp.py && /usr/local/searxng/searx-pyenv/bin/python -m flask run --host=0.0.0.0 --port=8080" &
SEARXNG_PID=$!

echo "Waiting for SearXNG to be ready..."
sleep 5

# Poll the SearxNG endpoint for up to MAX_TRIES seconds. A timeout is
# non-fatal: Perplexica still starts and SearxNG may come up later.
COUNTER=0
MAX_TRIES=30
until curl -s http://localhost:8080 > /dev/null 2>&1; do
  COUNTER=$((COUNTER+1))
  if [ $COUNTER -ge $MAX_TRIES ]; then
    echo "Warning: SearXNG health check timeout, but continuing..."
    break
  fi
  sleep 1
done

if curl -s http://localhost:8080 > /dev/null 2>&1; then
  echo "SearXNG started successfully (PID: $SEARXNG_PID)"
else
  echo "SearXNG may not be fully ready, but continuing (PID: $SEARXNG_PID)"
fi

cd /home/perplexica
echo "Starting Perplexica..."
# exec replaces the shell so node receives signals (PID 1) directly.
exec node server.js

View File

@@ -1,6 +1,6 @@
{ {
"name": "perplexica-frontend", "name": "perplexica-frontend",
"version": "1.11.0-rc3", "version": "1.11.2",
"license": "MIT", "license": "MIT",
"author": "ItzCrazyKns", "author": "ItzCrazyKns",
"scripts": { "scripts": {
@@ -16,14 +16,15 @@
"@huggingface/transformers": "^3.7.5", "@huggingface/transformers": "^3.7.5",
"@iarna/toml": "^2.2.5", "@iarna/toml": "^2.2.5",
"@icons-pack/react-simple-icons": "^12.3.0", "@icons-pack/react-simple-icons": "^12.3.0",
"@langchain/anthropic": "^0.3.24", "@langchain/anthropic": "^1.0.1",
"@langchain/community": "^0.3.49", "@langchain/community": "^1.0.3",
"@langchain/core": "^0.3.66", "@langchain/core": "^1.0.5",
"@langchain/google-genai": "^0.2.15", "@langchain/google-genai": "^1.0.1",
"@langchain/groq": "^0.2.3", "@langchain/groq": "^1.0.1",
"@langchain/ollama": "^0.2.3", "@langchain/langgraph": "^1.0.1",
"@langchain/openai": "^0.6.2", "@langchain/ollama": "^1.0.1",
"@langchain/textsplitters": "^0.1.0", "@langchain/openai": "^1.1.1",
"@langchain/textsplitters": "^1.0.0",
"@tailwindcss/typography": "^0.5.12", "@tailwindcss/typography": "^0.5.12",
"axios": "^1.8.3", "axios": "^1.8.3",
"better-sqlite3": "^11.9.1", "better-sqlite3": "^11.9.1",
@@ -33,28 +34,35 @@
"framer-motion": "^12.23.24", "framer-motion": "^12.23.24",
"html-to-text": "^9.0.5", "html-to-text": "^9.0.5",
"jspdf": "^3.0.1", "jspdf": "^3.0.1",
"langchain": "^0.3.30", "langchain": "^1.0.4",
"lightweight-charts": "^5.0.9",
"lucide-react": "^0.363.0", "lucide-react": "^0.363.0",
"mammoth": "^1.9.1", "mammoth": "^1.9.1",
"markdown-to-jsx": "^7.7.2", "markdown-to-jsx": "^7.7.2",
"mathjs": "^15.1.0",
"next": "^15.2.2", "next": "^15.2.2",
"next-themes": "^0.3.0", "next-themes": "^0.3.0",
"ollama": "^0.6.3",
"openai": "^6.9.0",
"partial-json": "^0.1.7",
"pdf-parse": "^1.1.1", "pdf-parse": "^1.1.1",
"react": "^18", "react": "^18",
"react-dom": "^18", "react-dom": "^18",
"react-text-to-speech": "^0.14.5", "react-text-to-speech": "^0.14.5",
"react-textarea-autosize": "^8.5.3", "react-textarea-autosize": "^8.5.3",
"rfc6902": "^5.1.2",
"sonner": "^1.4.41", "sonner": "^1.4.41",
"tailwind-merge": "^2.2.2", "tailwind-merge": "^2.2.2",
"winston": "^3.17.0", "winston": "^3.17.0",
"yahoo-finance2": "^3.10.2",
"yet-another-react-lightbox": "^3.17.2", "yet-another-react-lightbox": "^3.17.2",
"zod": "^3.22.4" "zod": "^4.1.12"
}, },
"devDependencies": { "devDependencies": {
"@types/better-sqlite3": "^7.6.12", "@types/better-sqlite3": "^7.6.12",
"@types/html-to-text": "^9.0.4", "@types/html-to-text": "^9.0.4",
"@types/jspdf": "^2.0.0", "@types/jspdf": "^2.0.0",
"@types/node": "^20", "@types/node": "^24.8.1",
"@types/pdf-parse": "^1.1.4", "@types/pdf-parse": "^1.1.4",
"@types/react": "^18", "@types/react": "^18",
"@types/react-dom": "^18", "@types/react-dom": "^18",
@@ -65,6 +73,6 @@
"postcss": "^8", "postcss": "^8",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"tailwindcss": "^3.3.0", "tailwindcss": "^3.3.0",
"typescript": "^5" "typescript": "^5.9.3"
} }
} }

View File

@@ -1,14 +1,10 @@
import crypto from 'crypto'; import crypto from 'crypto';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { EventEmitter } from 'stream';
import db from '@/lib/db';
import { chats, messages as messagesSchema } from '@/lib/db/schema';
import { and, eq, gt } from 'drizzle-orm';
import { getFileDetails } from '@/lib/utils/files';
import { searchHandlers } from '@/lib/search';
import { z } from 'zod'; import { z } from 'zod';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types'; import { ModelWithProvider } from '@/lib/models/types';
import SearchAgent from '@/lib/agents/search';
import SessionManager from '@/lib/session';
import { ChatTurnMessage } from '@/lib/types';
export const runtime = 'nodejs'; export const runtime = 'nodejs';
export const dynamic = 'force-dynamic'; export const dynamic = 'force-dynamic';
@@ -20,47 +16,25 @@ const messageSchema = z.object({
}); });
const chatModelSchema: z.ZodType<ModelWithProvider> = z.object({ const chatModelSchema: z.ZodType<ModelWithProvider> = z.object({
providerId: z.string({ providerId: z.string({ message: 'Chat model provider id must be provided' }),
errorMap: () => ({ key: z.string({ message: 'Chat model key must be provided' }),
message: 'Chat model provider id must be provided',
}),
}),
key: z.string({
errorMap: () => ({
message: 'Chat model key must be provided',
}),
}),
}); });
const embeddingModelSchema: z.ZodType<ModelWithProvider> = z.object({ const embeddingModelSchema: z.ZodType<ModelWithProvider> = z.object({
providerId: z.string({ providerId: z.string({
errorMap: () => ({
message: 'Embedding model provider id must be provided', message: 'Embedding model provider id must be provided',
}), }),
}), key: z.string({ message: 'Embedding model key must be provided' }),
key: z.string({
errorMap: () => ({
message: 'Embedding model key must be provided',
}),
}),
}); });
const bodySchema = z.object({ const bodySchema = z.object({
message: messageSchema, message: messageSchema,
optimizationMode: z.enum(['speed', 'balanced', 'quality'], { optimizationMode: z.enum(['speed', 'balanced', 'quality'], {
errorMap: () => ({
message: 'Optimization mode must be one of: speed, balanced, quality', message: 'Optimization mode must be one of: speed, balanced, quality',
}), }),
}),
focusMode: z.string().min(1, 'Focus mode is required'), focusMode: z.string().min(1, 'Focus mode is required'),
history: z history: z
.array( .array(z.tuple([z.string(), z.string()]))
z.tuple([z.string(), z.string()], {
errorMap: () => ({
message: 'History items must be tuples of two strings',
}),
}),
)
.optional() .optional()
.default([]), .default([]),
files: z.array(z.string()).optional().default([]), files: z.array(z.string()).optional().default([]),
@@ -78,7 +52,7 @@ const safeValidateBody = (data: unknown) => {
if (!result.success) { if (!result.success) {
return { return {
success: false, success: false,
error: result.error.errors.map((e) => ({ error: result.error.issues.map((e: any) => ({
path: e.path.join('.'), path: e.path.join('.'),
message: e.message, message: e.message,
})), })),
@@ -91,151 +65,12 @@ const safeValidateBody = (data: unknown) => {
}; };
}; };
const handleEmitterEvents = async (
stream: EventEmitter,
writer: WritableStreamDefaultWriter,
encoder: TextEncoder,
chatId: string,
) => {
let recievedMessage = '';
const aiMessageId = crypto.randomBytes(7).toString('hex');
stream.on('data', (data) => {
const parsedData = JSON.parse(data);
if (parsedData.type === 'response') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'message',
data: parsedData.data,
messageId: aiMessageId,
}) + '\n',
),
);
recievedMessage += parsedData.data;
} else if (parsedData.type === 'sources') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'sources',
data: parsedData.data,
messageId: aiMessageId,
}) + '\n',
),
);
const sourceMessageId = crypto.randomBytes(7).toString('hex');
db.insert(messagesSchema)
.values({
chatId: chatId,
messageId: sourceMessageId,
role: 'source',
sources: parsedData.data,
createdAt: new Date().toString(),
})
.execute();
}
});
stream.on('end', () => {
writer.write(
encoder.encode(
JSON.stringify({
type: 'messageEnd',
}) + '\n',
),
);
writer.close();
db.insert(messagesSchema)
.values({
content: recievedMessage,
chatId: chatId,
messageId: aiMessageId,
role: 'assistant',
createdAt: new Date().toString(),
})
.execute();
});
stream.on('error', (data) => {
const parsedData = JSON.parse(data);
writer.write(
encoder.encode(
JSON.stringify({
type: 'error',
data: parsedData.data,
}),
),
);
writer.close();
});
};
const handleHistorySave = async (
message: Message,
humanMessageId: string,
focusMode: string,
files: string[],
) => {
const chat = await db.query.chats.findFirst({
where: eq(chats.id, message.chatId),
});
const fileData = files.map(getFileDetails);
if (!chat) {
await db
.insert(chats)
.values({
id: message.chatId,
title: message.content,
createdAt: new Date().toString(),
focusMode: focusMode,
files: fileData,
})
.execute();
} else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) {
db.update(chats)
.set({
files: files.map(getFileDetails),
})
.where(eq(chats.id, message.chatId));
}
const messageExists = await db.query.messages.findFirst({
where: eq(messagesSchema.messageId, humanMessageId),
});
if (!messageExists) {
await db
.insert(messagesSchema)
.values({
content: message.content,
chatId: message.chatId,
messageId: humanMessageId,
role: 'user',
createdAt: new Date().toString(),
})
.execute();
} else {
await db
.delete(messagesSchema)
.where(
and(
gt(messagesSchema.id, messageExists.id),
eq(messagesSchema.chatId, message.chatId),
),
)
.execute();
}
};
export const POST = async (req: Request) => { export const POST = async (req: Request) => {
try { try {
const reqBody = (await req.json()) as Body; const reqBody = (await req.json()) as Body;
const parseBody = safeValidateBody(reqBody); const parseBody = safeValidateBody(reqBody);
if (!parseBody.success) { if (!parseBody.success) {
return Response.json( return Response.json(
{ message: 'Invalid request body', error: parseBody.error }, { message: 'Invalid request body', error: parseBody.error },
@@ -265,48 +100,116 @@ export const POST = async (req: Request) => {
), ),
]); ]);
const humanMessageId = const history: ChatTurnMessage[] = body.history.map((msg) => {
message.messageId ?? crypto.randomBytes(7).toString('hex');
const history: BaseMessage[] = body.history.map((msg) => {
if (msg[0] === 'human') { if (msg[0] === 'human') {
return new HumanMessage({ return {
role: 'user',
content: msg[1], content: msg[1],
}); };
} else { } else {
return new AIMessage({ return {
role: 'assistant',
content: msg[1], content: msg[1],
}); };
} }
}); });
const handler = searchHandlers[body.focusMode]; const agent = new SearchAgent();
const session = SessionManager.createSession();
if (!handler) {
return Response.json(
{
message: 'Invalid focus mode',
},
{ status: 400 },
);
}
const stream = await handler.searchAndAnswer(
message.content,
history,
llm,
embedding,
body.optimizationMode,
body.files,
body.systemInstructions as string,
);
const responseStream = new TransformStream(); const responseStream = new TransformStream();
const writer = responseStream.writable.getWriter(); const writer = responseStream.writable.getWriter();
const encoder = new TextEncoder(); const encoder = new TextEncoder();
handleEmitterEvents(stream, writer, encoder, message.chatId); let receivedMessage = '';
handleHistorySave(message, humanMessageId, body.focusMode, body.files);
session.addListener('data', (data: any) => {
if (data.type === 'response') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'message',
data: data.data,
}) + '\n',
),
);
receivedMessage += data.data;
} else if (data.type === 'sources') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'sources',
data: data.data,
}) + '\n',
),
);
} else if (data.type === 'block') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'block',
block: data.block,
}) + '\n',
),
);
} else if (data.type === 'updateBlock') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'updateBlock',
blockId: data.blockId,
patch: data.patch,
}) + '\n',
),
);
} else if (data.type === 'researchComplete') {
writer.write(
encoder.encode(
JSON.stringify({
type: 'researchComplete',
}) + '\n',
),
);
}
});
session.addListener('end', () => {
writer.write(
encoder.encode(
JSON.stringify({
type: 'messageEnd',
}) + '\n',
),
);
writer.close();
session.removeAllListeners();
});
session.addListener('error', (data: any) => {
writer.write(
encoder.encode(
JSON.stringify({
type: 'error',
data: data.data,
}) + '\n',
),
);
writer.close();
session.removeAllListeners();
});
agent.searchAsync(session, {
chatHistory: history,
followUp: message.content,
config: {
llm,
embedding: embedding,
sources: ['web'],
mode: body.optimizationMode,
},
});
/* handleHistorySave(message, humanMessageId, body.focusMode, body.files); */
return new Response(responseStream.readable, { return new Response(responseStream.readable, {
headers: { headers: {

View File

@@ -1,7 +1,6 @@
import handleImageSearch from '@/lib/chains/imageSearchAgent'; import searchImages from '@/lib/agents/media/image';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types'; import { ModelWithProvider } from '@/lib/models/types';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
interface ImageSearchBody { interface ImageSearchBody {
query: string; query: string;
@@ -13,16 +12,6 @@ export const POST = async (req: Request) => {
try { try {
const body: ImageSearchBody = await req.json(); const body: ImageSearchBody = await req.json();
const chatHistory = body.chatHistory
.map((msg: any) => {
if (msg.role === 'user') {
return new HumanMessage(msg.content);
} else if (msg.role === 'assistant') {
return new AIMessage(msg.content);
}
})
.filter((msg) => msg !== undefined) as BaseMessage[];
const registry = new ModelRegistry(); const registry = new ModelRegistry();
const llm = await registry.loadChatModel( const llm = await registry.loadChatModel(
@@ -30,9 +19,9 @@ export const POST = async (req: Request) => {
body.chatModel.key, body.chatModel.key,
); );
const images = await handleImageSearch( const images = await searchImages(
{ {
chat_history: chatHistory, chatHistory: body.chatHistory,
query: body.query, query: body.query,
}, },
llm, llm,

View File

@@ -1,8 +1,8 @@
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { MetaSearchAgentType } from '@/lib/search/metaSearchAgent';
import { searchHandlers } from '@/lib/search';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types'; import { ModelWithProvider } from '@/lib/models/types';
import SessionManager from '@/lib/session';
import SearchAgent from '@/lib/agents/search';
import { ChatTurnMessage } from '@/lib/types';
interface ChatRequestBody { interface ChatRequestBody {
optimizationMode: 'speed' | 'balanced'; optimizationMode: 'speed' | 'balanced';
@@ -30,12 +30,6 @@ export const POST = async (req: Request) => {
body.optimizationMode = body.optimizationMode || 'balanced'; body.optimizationMode = body.optimizationMode || 'balanced';
body.stream = body.stream || false; body.stream = body.stream || false;
const history: BaseMessage[] = body.history.map((msg) => {
return msg[0] === 'human'
? new HumanMessage({ content: msg[1] })
: new AIMessage({ content: msg[1] });
});
const registry = new ModelRegistry(); const registry = new ModelRegistry();
const [llm, embeddings] = await Promise.all([ const [llm, embeddings] = await Promise.all([
@@ -46,21 +40,26 @@ export const POST = async (req: Request) => {
), ),
]); ]);
const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode]; const history: ChatTurnMessage[] = body.history.map((msg) => {
return msg[0] === 'human'
? { role: 'user', content: msg[1] }
: { role: 'assistant', content: msg[1] };
});
if (!searchHandler) { const session = SessionManager.createSession();
return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
}
const emitter = await searchHandler.searchAndAnswer( const agent = new SearchAgent();
body.query,
history, agent.searchAsync(session, {
llm, chatHistory: history,
embeddings, config: {
body.optimizationMode, embedding: embeddings,
[], llm: llm,
body.systemInstructions || '', sources: ['web', 'discussions', 'academic'],
); mode: 'balanced',
},
followUp: body.query,
});
if (!body.stream) { if (!body.stream) {
return new Promise( return new Promise(
@@ -71,7 +70,7 @@ export const POST = async (req: Request) => {
let message = ''; let message = '';
let sources: any[] = []; let sources: any[] = [];
emitter.on('data', (data: string) => { session.addListener('data', (data: string) => {
try { try {
const parsedData = JSON.parse(data); const parsedData = JSON.parse(data);
if (parsedData.type === 'response') { if (parsedData.type === 'response') {
@@ -89,11 +88,11 @@ export const POST = async (req: Request) => {
} }
}); });
emitter.on('end', () => { session.addListener('end', () => {
resolve(Response.json({ message, sources }, { status: 200 })); resolve(Response.json({ message, sources }, { status: 200 }));
}); });
emitter.on('error', (error: any) => { session.addListener('error', (error: any) => {
reject( reject(
Response.json( Response.json(
{ message: 'Search error', error }, { message: 'Search error', error },
@@ -124,14 +123,14 @@ export const POST = async (req: Request) => {
); );
signal.addEventListener('abort', () => { signal.addEventListener('abort', () => {
emitter.removeAllListeners(); session.removeAllListeners();
try { try {
controller.close(); controller.close();
} catch (error) {} } catch (error) {}
}); });
emitter.on('data', (data: string) => { session.addListener('data', (data: string) => {
if (signal.aborted) return; if (signal.aborted) return;
try { try {
@@ -162,7 +161,7 @@ export const POST = async (req: Request) => {
} }
}); });
emitter.on('end', () => { session.addListener('end', () => {
if (signal.aborted) return; if (signal.aborted) return;
controller.enqueue( controller.enqueue(
@@ -175,7 +174,7 @@ export const POST = async (req: Request) => {
controller.close(); controller.close();
}); });
emitter.on('error', (error: any) => { session.addListener('error', (error: any) => {
if (signal.aborted) return; if (signal.aborted) return;
controller.error(error); controller.error(error);

View File

@@ -1,7 +1,6 @@
import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent'; import generateSuggestions from '@/lib/agents/suggestions';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types'; import { ModelWithProvider } from '@/lib/models/types';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
interface SuggestionsGenerationBody { interface SuggestionsGenerationBody {
@@ -13,16 +12,6 @@ export const POST = async (req: Request) => {
try { try {
const body: SuggestionsGenerationBody = await req.json(); const body: SuggestionsGenerationBody = await req.json();
const chatHistory = body.chatHistory
.map((msg: any) => {
if (msg.role === 'user') {
return new HumanMessage(msg.content);
} else if (msg.role === 'assistant') {
return new AIMessage(msg.content);
}
})
.filter((msg) => msg !== undefined) as BaseMessage[];
const registry = new ModelRegistry(); const registry = new ModelRegistry();
const llm = await registry.loadChatModel( const llm = await registry.loadChatModel(
@@ -32,7 +21,7 @@ export const POST = async (req: Request) => {
const suggestions = await generateSuggestions( const suggestions = await generateSuggestions(
{ {
chat_history: chatHistory, chatHistory: body.chatHistory,
}, },
llm, llm,
); );

View File

@@ -5,8 +5,9 @@ import crypto from 'crypto';
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf'; import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx'; import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
import { Document } from 'langchain/document'; import { Document } from '@langchain/core/documents';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { Chunk } from '@/lib/types';
interface FileRes { interface FileRes {
fileName: string; fileName: string;
@@ -87,9 +88,17 @@ export async function POST(req: Request) {
}), }),
); );
const embeddings = await model.embedDocuments( const chunks: Chunk[] = splitted.map((doc) => {
splitted.map((doc) => doc.pageContent), return {
content: doc.pageContent,
metadata: doc.metadata,
}
});
const embeddings = await model.embedChunks(
chunks
); );
const embeddingsDataPath = filePath.replace( const embeddingsDataPath = filePath.replace(
/\.\w+$/, /\.\w+$/,
'-embeddings.json', '-embeddings.json',

View File

@@ -1,7 +1,6 @@
import handleVideoSearch from '@/lib/chains/videoSearchAgent'; import handleVideoSearch from '@/lib/agents/media/video';
import ModelRegistry from '@/lib/models/registry'; import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types'; import { ModelWithProvider } from '@/lib/models/types';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
interface VideoSearchBody { interface VideoSearchBody {
query: string; query: string;
@@ -13,16 +12,6 @@ export const POST = async (req: Request) => {
try { try {
const body: VideoSearchBody = await req.json(); const body: VideoSearchBody = await req.json();
const chatHistory = body.chatHistory
.map((msg: any) => {
if (msg.role === 'user') {
return new HumanMessage(msg.content);
} else if (msg.role === 'assistant') {
return new AIMessage(msg.content);
}
})
.filter((msg) => msg !== undefined) as BaseMessage[];
const registry = new ModelRegistry(); const registry = new ModelRegistry();
const llm = await registry.loadChatModel( const llm = await registry.loadChatModel(
@@ -32,7 +21,7 @@ export const POST = async (req: Request) => {
const videos = await handleVideoSearch( const videos = await handleVideoSearch(
{ {
chat_history: chatHistory, chatHistory: body.chatHistory,
query: body.query, query: body.query,
}, },
llm, llm,

View File

@@ -1,17 +1,10 @@
'use client'; 'use client';
import ChatWindow from '@/components/ChatWindow'; import ChatWindow from '@/components/ChatWindow';
import { useParams } from 'next/navigation';
import React from 'react'; import React from 'react';
import { ChatProvider } from '@/lib/hooks/useChat';
const Page = () => { const Page = () => {
const { chatId }: { chatId: string } = useParams(); return <ChatWindow />;
return (
<ChatProvider id={chatId}>
<ChatWindow />
</ChatProvider>
);
}; };
export default Page; export default Page;

View File

@@ -9,6 +9,7 @@ import { Toaster } from 'sonner';
import ThemeProvider from '@/components/theme/Provider'; import ThemeProvider from '@/components/theme/Provider';
import configManager from '@/lib/config'; import configManager from '@/lib/config';
import SetupWizard from '@/components/Setup/SetupWizard'; import SetupWizard from '@/components/Setup/SetupWizard';
import { ChatProvider } from '@/lib/hooks/useChat';
const montserrat = Montserrat({ const montserrat = Montserrat({
weight: ['300', '400', '500', '700'], weight: ['300', '400', '500', '700'],
@@ -36,7 +37,7 @@ export default function RootLayout({
<body className={cn('h-full', montserrat.className)}> <body className={cn('h-full', montserrat.className)}>
<ThemeProvider> <ThemeProvider>
{setupComplete ? ( {setupComplete ? (
<> <ChatProvider>
<Sidebar>{children}</Sidebar> <Sidebar>{children}</Sidebar>
<Toaster <Toaster
toastOptions={{ toastOptions={{
@@ -47,7 +48,7 @@ export default function RootLayout({
}, },
}} }}
/> />
</> </ChatProvider>
) : ( ) : (
<SetupWizard configSections={configSections} /> <SetupWizard configSections={configSections} />
)} )}

View File

@@ -1,7 +1,5 @@
import ChatWindow from '@/components/ChatWindow'; import ChatWindow from '@/components/ChatWindow';
import { ChatProvider } from '@/lib/hooks/useChat';
import { Metadata } from 'next'; import { Metadata } from 'next';
import { Suspense } from 'react';
export const metadata: Metadata = { export const metadata: Metadata = {
title: 'Chat - Perplexica', title: 'Chat - Perplexica',
@@ -9,15 +7,7 @@ export const metadata: Metadata = {
}; };
const Home = () => { const Home = () => {
return ( return <ChatWindow />;
<div>
<Suspense>
<ChatProvider>
<ChatWindow />
</ChatProvider>
</Suspense>
</div>
);
}; };
export default Home; export default Home;

View File

@@ -0,0 +1,197 @@
'use client';
import { Brain, Search, FileText, ChevronDown, ChevronUp } from 'lucide-react';
import { motion, AnimatePresence } from 'framer-motion';
import { useEffect, useState } from 'react';
import { ResearchBlock, ResearchBlockSubStep } from '@/lib/types';
import { useChat } from '@/lib/hooks/useChat';
/** Maps a research sub-step type to its timeline icon; null for unknown types. */
const getStepIcon = (step: ResearchBlockSubStep) => {
  switch (step.type) {
    case 'reasoning':
      return <Brain className="w-4 h-4" />;
    case 'searching':
      return <Search className="w-4 h-4" />;
    case 'reading':
      return <FileText className="w-4 h-4" />;
    default:
      return null;
  }
};
/**
 * Builds the human-readable heading for a research sub-step.
 *
 * - reasoning: "Thinking..." while streaming with no text yet, else "Thinking"
 * - searching: "Searching N query/queries"
 * - reading:   "Found N result/results"
 * - anything else falls back to "Processing"
 */
const getStepTitle = (
  step: ResearchBlockSubStep,
  isStreaming: boolean,
): string => {
  switch (step.type) {
    case 'reasoning':
      // Animated ellipsis only while tokens are still arriving and nothing
      // has been produced yet.
      return isStreaming && !step.reasoning ? 'Thinking...' : 'Thinking';
    case 'searching': {
      const count = step.searching.length;
      return `Searching ${count} ${count === 1 ? 'query' : 'queries'}`;
    }
    case 'reading': {
      const count = step.reading.length;
      return `Found ${count} ${count === 1 ? 'result' : 'results'}`;
    }
    default:
      return 'Processing';
  }
};
/**
 * Collapsible "Research Progress" timeline shown above an assistant answer.
 *
 * Renders each sub-step of a ResearchBlock (reasoning text, search queries,
 * read results) as a vertical timeline. Auto-collapses once the chat hook
 * reports the research phase has ended, and re-expands while the message is
 * still in the 'answering' status.
 *
 * Renders nothing when the block is missing or has no sub-steps.
 */
const AssistantSteps = ({
  block,
  status,
}: {
  block: ResearchBlock;
  status: 'answering' | 'completed' | 'error';
}) => {
  // Expanded by default so progress is visible while research is running.
  const [isExpanded, setIsExpanded] = useState(true);
  const { researchEnded, loading } = useChat();

  // Collapse when research finishes; expand again if the message goes back
  // to (or is still in) the answering state.
  useEffect(() => {
    if (researchEnded) {
      setIsExpanded(false);
    } else if (status === 'answering') {
      setIsExpanded(true);
    }
  }, [researchEnded, status]);

  if (!block || block.data.subSteps.length === 0) return null;

  return (
    <div className="rounded-lg bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 overflow-hidden">
      {/* Header row: step count + expand/collapse toggle */}
      <button
        onClick={() => setIsExpanded(!isExpanded)}
        className="w-full flex items-center justify-between p-3 hover:bg-light-200 dark:hover:bg-dark-200 transition duration-200"
      >
        <div className="flex items-center gap-2">
          <Brain className="w-4 h-4 text-black dark:text-white" />
          <span className="text-sm font-medium text-black dark:text-white">
            Research Progress ({block.data.subSteps.length}{' '}
            {block.data.subSteps.length === 1 ? 'step' : 'steps'})
          </span>
        </div>
        {isExpanded ? (
          <ChevronUp className="w-4 h-4 text-black/70 dark:text-white/70" />
        ) : (
          <ChevronDown className="w-4 h-4 text-black/70 dark:text-white/70" />
        )}
      </button>
      <AnimatePresence>
        {isExpanded && (
          <motion.div
            initial={{ height: 0, opacity: 0 }}
            animate={{ height: 'auto', opacity: 1 }}
            exit={{ height: 0, opacity: 0 }}
            transition={{ duration: 0.2 }}
            className="border-t border-light-200 dark:border-dark-200"
          >
            <div className="p-3 space-y-2">
              {block.data.subSteps.map((step, index) => {
                // Only the most recent step animates while a response is
                // still streaming and research has not ended.
                const isLastStep = index === block.data.subSteps.length - 1;
                const isStreaming = loading && isLastStep && !researchEnded;
                return (
                  <motion.div
                    key={step.id}
                    initial={{ opacity: 0, x: -10 }}
                    animate={{ opacity: 1, x: 0 }}
                    transition={{ duration: 0.2, delay: 0 }}
                    className="flex gap-3"
                  >
                    {/* Timeline connector */}
                    <div className="flex flex-col items-center pt-0.5">
                      <div
                        className={`rounded-full p-1.5 bg-light-100 dark:bg-dark-100 text-black/70 dark:text-white/70 ${isStreaming ? 'animate-pulse' : ''}`}
                      >
                        {getStepIcon(step)}
                      </div>
                      {/* Vertical line between this icon and the next step */}
                      {index < block.data.subSteps.length - 1 && (
                        <div className="w-0.5 flex-1 min-h-[20px] bg-light-200 dark:bg-dark-200 mt-1.5" />
                      )}
                    </div>
                    {/* Step content */}
                    <div className="flex-1 pb-1">
                      <span className="text-sm font-medium text-black dark:text-white">
                        {getStepTitle(step, isStreaming)}
                      </span>
                      {step.type === 'reasoning' && (
                        <>
                          {step.reasoning && (
                            <p className="text-xs text-black/70 dark:text-white/70 mt-0.5">
                              {step.reasoning}
                            </p>
                          )}
                          {/* Bouncing-dots placeholder while reasoning text
                              has not started streaming yet */}
                          {isStreaming && !step.reasoning && (
                            <div className="flex items-center gap-1.5 mt-0.5">
                              <div
                                className="w-1.5 h-1.5 bg-black/40 dark:bg-white/40 rounded-full animate-bounce"
                                style={{ animationDelay: '0ms' }}
                              />
                              <div
                                className="w-1.5 h-1.5 bg-black/40 dark:bg-white/40 rounded-full animate-bounce"
                                style={{ animationDelay: '150ms' }}
                              />
                              <div
                                className="w-1.5 h-1.5 bg-black/40 dark:bg-white/40 rounded-full animate-bounce"
                                style={{ animationDelay: '300ms' }}
                              />
                            </div>
                          )}
                        </>
                      )}
                      {/* Search queries rendered as pill badges */}
                      {step.type === 'searching' &&
                        step.searching.length > 0 && (
                          <div className="flex flex-wrap gap-1.5 mt-1.5">
                            {step.searching.map((query, idx) => (
                              <span
                                key={idx}
                                className="inline-flex items-center px-2 py-0.5 rounded-md text-xs font-medium bg-light-100 dark:bg-dark-100 text-black/70 dark:text-white/70 border border-light-200 dark:border-dark-200"
                              >
                                {query}
                              </span>
                            ))}
                          </div>
                        )}
                      {/* First 4 read results as favicon + title badges */}
                      {step.type === 'reading' && step.reading.length > 0 && (
                        <div className="flex flex-wrap gap-1.5 mt-1.5">
                          {step.reading.slice(0, 4).map((result, idx) => {
                            const url = result.metadata.url || '';
                            const title = result.metadata.title || 'Untitled';
                            // NOTE(review): new URL() throws a TypeError on a
                            // malformed non-empty url — consider a try/catch;
                            // confirm upstream always supplies valid URLs.
                            const domain = url ? new URL(url).hostname : '';
                            // NOTE(review): presumably Google's favicon
                            // service; verify this host/path works as the
                            // usual form is www.google.com/s2/favicons.
                            const faviconUrl = domain
                              ? `https://s2.googleusercontent.com/s2/favicons?domain=${domain}&sz=128`
                              : '';
                            return (
                              <span
                                key={idx}
                                className="inline-flex items-center gap-1.5 px-2 py-0.5 rounded-md text-xs font-medium bg-light-100 dark:bg-dark-100 text-black/70 dark:text-white/70 border border-light-200 dark:border-dark-200"
                              >
                                {faviconUrl && (
                                  <img
                                    src={faviconUrl}
                                    alt=""
                                    className="w-3 h-3 rounded-sm flex-shrink-0"
                                    // Hide the icon if the favicon fails to load
                                    onError={(e) => {
                                      e.currentTarget.style.display = 'none';
                                    }}
                                  />
                                )}
                                <span className="line-clamp-1">{title}</span>
                              </span>
                            );
                          })}
                        </div>
                      )}
                    </div>
                  </motion.div>
                );
              })}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
};

export default AssistantSteps;

View File

@@ -7,11 +7,12 @@ import MessageBoxLoading from './MessageBoxLoading';
import { useChat } from '@/lib/hooks/useChat'; import { useChat } from '@/lib/hooks/useChat';
const Chat = () => { const Chat = () => {
const { sections, chatTurns, loading, messageAppeared } = useChat(); const { sections, loading, messageAppeared, messages } = useChat();
const [dividerWidth, setDividerWidth] = useState(0); const [dividerWidth, setDividerWidth] = useState(0);
const dividerRef = useRef<HTMLDivElement | null>(null); const dividerRef = useRef<HTMLDivElement | null>(null);
const messageEnd = useRef<HTMLDivElement | null>(null); const messageEnd = useRef<HTMLDivElement | null>(null);
const lastScrolledRef = useRef<number>(0);
useEffect(() => { useEffect(() => {
const updateDividerWidth = () => { const updateDividerWidth = () => {
@@ -22,35 +23,40 @@ const Chat = () => {
updateDividerWidth(); updateDividerWidth();
const resizeObserver = new ResizeObserver(() => {
updateDividerWidth();
});
const currentRef = dividerRef.current;
if (currentRef) {
resizeObserver.observe(currentRef);
}
window.addEventListener('resize', updateDividerWidth); window.addEventListener('resize', updateDividerWidth);
return () => { return () => {
if (currentRef) {
resizeObserver.unobserve(currentRef);
}
resizeObserver.disconnect();
window.removeEventListener('resize', updateDividerWidth); window.removeEventListener('resize', updateDividerWidth);
}; };
}, []); }, [sections.length]);
useEffect(() => { useEffect(() => {
const scroll = () => { const scroll = () => {
messageEnd.current?.scrollIntoView({ behavior: 'auto' }); messageEnd.current?.scrollIntoView({ behavior: 'auto' });
}; };
if (chatTurns.length === 1) { if (messages.length === 1) {
document.title = `${chatTurns[0].content.substring(0, 30)} - Perplexica`; document.title = `${messages[0].query.substring(0, 30)} - Perplexica`;
} }
const messageEndBottom = if (sections.length > lastScrolledRef.current) {
messageEnd.current?.getBoundingClientRect().bottom ?? 0;
const distanceFromMessageEnd = window.innerHeight - messageEndBottom;
if (distanceFromMessageEnd >= -100) {
scroll(); scroll();
lastScrolledRef.current = sections.length;
} }
}, [messages]);
if (chatTurns[chatTurns.length - 1]?.role === 'user') {
scroll();
}
}, [chatTurns]);
return ( return (
<div className="flex flex-col space-y-6 pt-8 pb-44 lg:pb-32 sm:mx-4 md:mx-8"> <div className="flex flex-col space-y-6 pt-8 pb-44 lg:pb-32 sm:mx-4 md:mx-8">
@@ -58,7 +64,7 @@ const Chat = () => {
const isLast = i === sections.length - 1; const isLast = i === sections.length - 1;
return ( return (
<Fragment key={section.userMessage.messageId}> <Fragment key={section.message.messageId}>
<MessageBox <MessageBox
section={section} section={section}
sectionIndex={i} sectionIndex={i}

View File

@@ -1,14 +1,12 @@
'use client'; 'use client';
import { Document } from '@langchain/core/documents';
import Navbar from './Navbar'; import Navbar from './Navbar';
import Chat from './Chat'; import Chat from './Chat';
import EmptyChat from './EmptyChat'; import EmptyChat from './EmptyChat';
import { Settings } from 'lucide-react';
import Link from 'next/link';
import NextError from 'next/error'; import NextError from 'next/error';
import { useChat } from '@/lib/hooks/useChat'; import { useChat } from '@/lib/hooks/useChat';
import Loader from './ui/Loader'; import SettingsButtonMobile from './Settings/SettingsButtonMobile';
import { Block, Chunk } from '@/lib/types';
export interface BaseMessage { export interface BaseMessage {
chatId: string; chatId: string;
@@ -16,20 +14,27 @@ export interface BaseMessage {
createdAt: Date; createdAt: Date;
} }
export interface Message extends BaseMessage {
backendId: string;
query: string;
responseBlocks: Block[];
status: 'answering' | 'completed' | 'error';
}
export interface UserMessage extends BaseMessage {
role: 'user';
content: string;
}
export interface AssistantMessage extends BaseMessage { export interface AssistantMessage extends BaseMessage {
role: 'assistant'; role: 'assistant';
content: string; content: string;
suggestions?: string[]; suggestions?: string[];
} }
export interface UserMessage extends BaseMessage {
role: 'user';
content: string;
}
export interface SourceMessage extends BaseMessage { export interface SourceMessage extends BaseMessage {
role: 'source'; role: 'source';
sources: Document[]; sources: Chunk[];
} }
export interface SuggestionMessage extends BaseMessage { export interface SuggestionMessage extends BaseMessage {
@@ -37,11 +42,12 @@ export interface SuggestionMessage extends BaseMessage {
suggestions: string[]; suggestions: string[];
} }
export type Message = export type LegacyMessage =
| AssistantMessage | AssistantMessage
| UserMessage | UserMessage
| SourceMessage | SourceMessage
| SuggestionMessage; | SuggestionMessage;
export type ChatTurn = UserMessage | AssistantMessage; export type ChatTurn = UserMessage | AssistantMessage;
export interface File { export interface File {
@@ -50,15 +56,18 @@ export interface File {
fileId: string; fileId: string;
} }
export interface Widget {
widgetType: string;
params: Record<string, any>;
}
const ChatWindow = () => { const ChatWindow = () => {
const { hasError, isReady, notFound, messages } = useChat(); const { hasError, notFound, messages } = useChat();
if (hasError) { if (hasError) {
return ( return (
<div className="relative"> <div className="relative">
<div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5"> <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
<Link href="/settings"> <SettingsButtonMobile />
<Settings className="cursor-pointer lg:hidden" />
</Link>
</div> </div>
<div className="flex flex-col items-center justify-center min-h-screen"> <div className="flex flex-col items-center justify-center min-h-screen">
<p className="dark:text-white/70 text-black/70 text-sm"> <p className="dark:text-white/70 text-black/70 text-sm">
@@ -69,8 +78,7 @@ const ChatWindow = () => {
); );
} }
return isReady ? ( return notFound ? (
notFound ? (
<NextError statusCode={404} /> <NextError statusCode={404} />
) : ( ) : (
<div> <div>
@@ -83,11 +91,6 @@ const ChatWindow = () => {
<EmptyChat /> <EmptyChat />
)} )}
</div> </div>
)
) : (
<div className="flex flex-row items-center justify-center min-h-screen">
<Loader />
</div>
); );
}; };

View File

@@ -4,14 +4,13 @@ import { File } from './ChatWindow';
import Link from 'next/link'; import Link from 'next/link';
import WeatherWidget from './WeatherWidget'; import WeatherWidget from './WeatherWidget';
import NewsArticleWidget from './NewsArticleWidget'; import NewsArticleWidget from './NewsArticleWidget';
import SettingsButtonMobile from '@/components/Settings/SettingsButtonMobile';
const EmptyChat = () => { const EmptyChat = () => {
return ( return (
<div className="relative"> <div className="relative">
<div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5"> <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
<Link href="/settings"> <SettingsButtonMobile />
<Settings className="cursor-pointer lg:hidden" />
</Link>
</div> </div>
<div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-4"> <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-4">
<div className="flex flex-col items-center justify-center w-full space-y-8"> <div className="flex flex-col items-center justify-center w-full space-y-8">

View File

@@ -15,14 +15,21 @@ const Copy = ({
return ( return (
<button <button
onClick={() => { onClick={() => {
const contentToCopy = `${initialMessage}${section?.sourceMessage?.sources && section.sourceMessage.sources.length > 0 && `\n\nCitations:\n${section.sourceMessage.sources?.map((source: any, i: any) => `[${i + 1}] ${source.metadata.url}`).join(`\n`)}`}`; const contentToCopy = `${initialMessage}${
section?.message.responseBlocks.filter((b) => b.type === 'source')
?.length > 0 &&
`\n\nCitations:\n${section.message.responseBlocks
.filter((b) => b.type === 'source')
?.map((source: any, i: any) => `[${i + 1}] ${source.metadata.url}`)
.join(`\n`)}`
}`;
navigator.clipboard.writeText(contentToCopy); navigator.clipboard.writeText(contentToCopy);
setCopied(true); setCopied(true);
setTimeout(() => setCopied(false), 1000); setTimeout(() => setCopied(false), 1000);
}} }}
className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white" className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
> >
{copied ? <Check size={18} /> : <ClipboardList size={18} />} {copied ? <Check size={16} /> : <ClipboardList size={16} />}
</button> </button>
); );
}; };

View File

@@ -1,4 +1,4 @@
import { ArrowLeftRight } from 'lucide-react'; import { ArrowLeftRight, Repeat } from 'lucide-react';
const Rewrite = ({ const Rewrite = ({
rewrite, rewrite,
@@ -10,12 +10,11 @@ const Rewrite = ({
return ( return (
<button <button
onClick={() => rewrite(messageId)} onClick={() => rewrite(messageId)}
className="py-2 px-3 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1" className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1"
> >
<ArrowLeftRight size={18} /> <Repeat size={16} />
<p className="text-xs font-medium">Rewrite</p>
</button> </button>
); );
}; };
1;
export default Rewrite; export default Rewrite;

View File

@@ -10,6 +10,7 @@ import {
StopCircle, StopCircle,
Layers3, Layers3,
Plus, Plus,
CornerDownRight,
} from 'lucide-react'; } from 'lucide-react';
import Markdown, { MarkdownToJSX } from 'markdown-to-jsx'; import Markdown, { MarkdownToJSX } from 'markdown-to-jsx';
import Copy from './MessageActions/Copy'; import Copy from './MessageActions/Copy';
@@ -21,6 +22,9 @@ import { useSpeech } from 'react-text-to-speech';
import ThinkBox from './ThinkBox'; import ThinkBox from './ThinkBox';
import { useChat, Section } from '@/lib/hooks/useChat'; import { useChat, Section } from '@/lib/hooks/useChat';
import Citation from './Citation'; import Citation from './Citation';
import AssistantSteps from './AssistantSteps';
import { ResearchBlock } from '@/lib/types';
import Renderer from './Widgets/Renderer';
const ThinkTagProcessor = ({ const ThinkTagProcessor = ({
children, children,
@@ -45,12 +49,21 @@ const MessageBox = ({
dividerRef?: MutableRefObject<HTMLDivElement | null>; dividerRef?: MutableRefObject<HTMLDivElement | null>;
isLast: boolean; isLast: boolean;
}) => { }) => {
const { loading, chatTurns, sendMessage, rewrite } = useChat(); const { loading, sendMessage, rewrite, messages, researchEnded } = useChat();
const parsedMessage = section.parsedAssistantMessage || ''; const parsedMessage = section.parsedTextBlocks.join('\n\n');
const speechMessage = section.speechMessage || ''; const speechMessage = section.speechMessage || '';
const thinkingEnded = section.thinkingEnded; const thinkingEnded = section.thinkingEnded;
const sourceBlocks = section.message.responseBlocks.filter(
(block): block is typeof block & { type: 'source' } =>
block.type === 'source',
);
const sources = sourceBlocks.flatMap((block) => block.data);
const hasContent = section.parsedTextBlocks.length > 0;
const { speechStatus, start, stop } = useSpeech({ text: speechMessage }); const { speechStatus, start, stop } = useSpeech({ text: speechMessage });
const markdownOverrides: MarkdownToJSX.Options = { const markdownOverrides: MarkdownToJSX.Options = {
@@ -71,7 +84,7 @@ const MessageBox = ({
<div className="space-y-6"> <div className="space-y-6">
<div className={'w-full pt-8 break-words'}> <div className={'w-full pt-8 break-words'}>
<h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12"> <h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
{section.userMessage.content} {section.message.query}
</h2> </h2>
</div> </div>
@@ -80,8 +93,7 @@ const MessageBox = ({
ref={dividerRef} ref={dividerRef}
className="flex flex-col space-y-6 w-full lg:w-9/12" className="flex flex-col space-y-6 w-full lg:w-9/12"
> >
{section.sourceMessage && {sources.length > 0 && (
section.sourceMessage.sources.length > 0 && (
<div className="flex flex-col space-y-2"> <div className="flex flex-col space-y-2">
<div className="flex flex-row items-center space-x-2"> <div className="flex flex-row items-center space-x-2">
<BookCopy className="text-black dark:text-white" size={20} /> <BookCopy className="text-black dark:text-white" size={20} />
@@ -89,12 +101,42 @@ const MessageBox = ({
Sources Sources
</h3> </h3>
</div> </div>
<MessageSources sources={section.sourceMessage.sources} /> <MessageSources sources={sources} />
</div>
)}
{section.message.responseBlocks
.filter(
(block): block is ResearchBlock =>
block.type === 'research' && block.data.subSteps.length > 0,
)
.map((researchBlock) => (
<div key={researchBlock.id} className="flex flex-col space-y-2">
<AssistantSteps
block={researchBlock}
status={section.message.status}
/>
</div>
))}
{section.widgets.length > 0 && <Renderer widgets={section.widgets} />}
{isLast &&
loading &&
!researchEnded &&
!section.message.responseBlocks.some(
(b) => b.type === 'research' && b.data.subSteps.length > 0,
) && (
<div className="flex items-center gap-2 p-3 rounded-lg bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200">
<Disc3 className="w-4 h-4 text-black dark:text-white animate-spin" />
<span className="text-sm text-black/70 dark:text-white/70">
Brainstorming...
</span>
</div> </div>
)} )}
<div className="flex flex-col space-y-2"> <div className="flex flex-col space-y-2">
{section.sourceMessage && ( {sources.length > 0 && (
<div className="flex flex-row items-center space-x-2"> <div className="flex flex-row items-center space-x-2">
<Disc3 <Disc3
className={cn( className={cn(
@@ -109,7 +151,7 @@ const MessageBox = ({
</div> </div>
)} )}
{section.assistantMessage && ( {hasContent && (
<> <>
<Markdown <Markdown
className={cn( className={cn(
@@ -122,18 +164,15 @@ const MessageBox = ({
</Markdown> </Markdown>
{loading && isLast ? null : ( {loading && isLast ? null : (
<div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4 -mx-2"> <div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4">
<div className="flex flex-row items-center space-x-1"> <div className="flex flex-row items-center -ml-2">
<Rewrite <Rewrite
rewrite={rewrite} rewrite={rewrite}
messageId={section.assistantMessage.messageId} messageId={section.message.messageId}
/> />
</div> </div>
<div className="flex flex-row items-center space-x-1"> <div className="flex flex-row items-center -mr-2">
<Copy <Copy initialMessage={parsedMessage} section={section} />
initialMessage={section.assistantMessage.content}
section={section}
/>
<button <button
onClick={() => { onClick={() => {
if (speechStatus === 'started') { if (speechStatus === 'started') {
@@ -142,12 +181,12 @@ const MessageBox = ({
start(); start();
} }
}} }}
className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white" className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
> >
{speechStatus === 'started' ? ( {speechStatus === 'started' ? (
<StopCircle size={18} /> <StopCircle size={16} />
) : ( ) : (
<Volume2 size={18} /> <Volume2 size={16} />
)} )}
</button> </button>
</div> </div>
@@ -157,9 +196,9 @@ const MessageBox = ({
{isLast && {isLast &&
section.suggestions && section.suggestions &&
section.suggestions.length > 0 && section.suggestions.length > 0 &&
section.assistantMessage && hasContent &&
!loading && ( !loading && (
<div className="mt-8 pt-6 border-t border-light-200/50 dark:border-dark-200/50"> <div className="mt-6">
<div className="flex flex-row items-center space-x-2 mb-4"> <div className="flex flex-row items-center space-x-2 mb-4">
<Layers3 <Layers3
className="text-black dark:text-white" className="text-black dark:text-white"
@@ -173,20 +212,24 @@ const MessageBox = ({
{section.suggestions.map( {section.suggestions.map(
(suggestion: string, i: number) => ( (suggestion: string, i: number) => (
<div key={i}> <div key={i}>
{i > 0 && ( <div className="h-px bg-light-200/40 dark:bg-dark-200/40" />
<div className="h-px bg-light-200/40 dark:bg-dark-200/40 mx-3" />
)}
<button <button
onClick={() => sendMessage(suggestion)} onClick={() => sendMessage(suggestion)}
className="group w-full px-3 py-4 text-left transition-colors duration-200" className="group w-full py-4 text-left transition-colors duration-200"
> >
<div className="flex items-center justify-between gap-3"> <div className="flex items-center justify-between gap-3">
<p className="text-sm text-black/70 dark:text-white/70 group-hover:text-[#24A0ED] transition-colors duration-200 leading-relaxed"> <div className="flex flex-row space-x-3 items-center ">
<CornerDownRight
size={17}
className="group-hover:text-sky-400 transition-colors duration-200"
/>
<p className="text-sm text-black/70 dark:text-white/70 group-hover:text-sky-400 transition-colors duration-200 leading-relaxed">
{suggestion} {suggestion}
</p> </p>
</div>
<Plus <Plus
size={16} size={16}
className="text-black/40 dark:text-white/40 group-hover:text-[#24A0ED] transition-colors duration-200 flex-shrink-0" className="text-black/40 dark:text-white/40 group-hover:text-sky-400 transition-colors duration-200 flex-shrink-0"
/> />
</div> </div>
</button> </button>
@@ -201,17 +244,17 @@ const MessageBox = ({
</div> </div>
</div> </div>
{section.assistantMessage && ( {hasContent && (
<div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4"> <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
<SearchImages <SearchImages
query={section.userMessage.content} query={section.message.query}
chatHistory={chatTurns.slice(0, sectionIndex * 2)} chatHistory={messages}
messageId={section.assistantMessage.messageId} messageId={section.message.messageId}
/> />
<SearchVideos <SearchVideos
chatHistory={chatTurns.slice(0, sectionIndex * 2)} chatHistory={messages}
query={section.userMessage.content} query={section.message.query}
messageId={section.assistantMessage.messageId} messageId={section.message.messageId}
/> />
</div> </div>
)} )}

View File

@@ -8,17 +8,16 @@ import {
PopoverPanel, PopoverPanel,
Transition, Transition,
} from '@headlessui/react'; } from '@headlessui/react';
import { Fragment, useEffect, useState } from 'react'; import { Fragment, useEffect, useMemo, useState } from 'react';
import { MinimalProvider } from '@/lib/models/types'; import { MinimalProvider } from '@/lib/models/types';
import { useChat } from '@/lib/hooks/useChat';
const ModelSelector = () => { const ModelSelector = () => {
const [providers, setProviders] = useState<MinimalProvider[]>([]); const [providers, setProviders] = useState<MinimalProvider[]>([]);
const [isLoading, setIsLoading] = useState(true); const [isLoading, setIsLoading] = useState(true);
const [searchQuery, setSearchQuery] = useState(''); const [searchQuery, setSearchQuery] = useState('');
const [selectedModel, setSelectedModel] = useState<{
providerId: string; const { setChatModelProvider, chatModelProvider } = useChat();
modelKey: string;
} | null>(null);
useEffect(() => { useEffect(() => {
const loadProviders = async () => { const loadProviders = async () => {
@@ -30,28 +29,8 @@ const ModelSelector = () => {
throw new Error('Failed to fetch providers'); throw new Error('Failed to fetch providers');
} }
const data = await res.json(); const data: { providers: MinimalProvider[] } = await res.json();
setProviders(data.providers || []); setProviders(data.providers);
const savedProviderId = localStorage.getItem('chatModelProviderId');
const savedModelKey = localStorage.getItem('chatModelKey');
if (savedProviderId && savedModelKey) {
setSelectedModel({
providerId: savedProviderId,
modelKey: savedModelKey,
});
} else if (data.providers && data.providers.length > 0) {
const firstProvider = data.providers.find(
(p: MinimalProvider) => p.chatModels.length > 0,
);
if (firstProvider && firstProvider.chatModels[0]) {
setSelectedModel({
providerId: firstProvider.id,
modelKey: firstProvider.chatModels[0].key,
});
}
}
} catch (error) { } catch (error) {
console.error('Error loading providers:', error); console.error('Error loading providers:', error);
} finally { } finally {
@@ -62,13 +41,32 @@ const ModelSelector = () => {
loadProviders(); loadProviders();
}, []); }, []);
const orderedProviders = useMemo(() => {
if (!chatModelProvider?.providerId) return providers;
const currentProviderIndex = providers.findIndex(
(p) => p.id === chatModelProvider.providerId,
);
if (currentProviderIndex === -1) {
return providers;
}
const selectedProvider = providers[currentProviderIndex];
const remainingProviders = providers.filter(
(_, index) => index !== currentProviderIndex,
);
return [selectedProvider, ...remainingProviders];
}, [providers, chatModelProvider]);
const handleModelSelect = (providerId: string, modelKey: string) => { const handleModelSelect = (providerId: string, modelKey: string) => {
setSelectedModel({ providerId, modelKey }); setChatModelProvider({ providerId, key: modelKey });
localStorage.setItem('chatModelProviderId', providerId); localStorage.setItem('chatModelProviderId', providerId);
localStorage.setItem('chatModelKey', modelKey); localStorage.setItem('chatModelKey', modelKey);
}; };
const filteredProviders = providers const filteredProviders = orderedProviders
.map((provider) => ({ .map((provider) => ({
...provider, ...provider,
chatModels: provider.chatModels.filter( chatModels: provider.chatModels.filter(
@@ -140,15 +138,16 @@ const ModelSelector = () => {
<div className="flex flex-col px-2 py-2 space-y-0.5"> <div className="flex flex-col px-2 py-2 space-y-0.5">
{provider.chatModels.map((model) => ( {provider.chatModels.map((model) => (
<PopoverButton <button
key={model.key} key={model.key}
onClick={() => onClick={() =>
handleModelSelect(provider.id, model.key) handleModelSelect(provider.id, model.key)
} }
type="button"
className={cn( className={cn(
'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group', 'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
selectedModel?.providerId === provider.id && chatModelProvider?.providerId === provider.id &&
selectedModel?.modelKey === model.key chatModelProvider?.key === model.key
? 'bg-light-secondary dark:bg-dark-secondary' ? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary', : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)} )}
@@ -158,8 +157,9 @@ const ModelSelector = () => {
size={15} size={15}
className={cn( className={cn(
'shrink-0', 'shrink-0',
selectedModel?.providerId === provider.id && chatModelProvider?.providerId ===
selectedModel?.modelKey === model.key provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500' ? 'text-sky-500'
: 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70', : 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
)} )}
@@ -167,8 +167,9 @@ const ModelSelector = () => {
<p <p
className={cn( className={cn(
'text-sm truncate', 'text-sm truncate',
selectedModel?.providerId === provider.id && chatModelProvider?.providerId ===
selectedModel?.modelKey === model.key provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500 font-medium' ? 'text-sky-500 font-medium'
: 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white', : 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
)} )}
@@ -176,7 +177,7 @@ const ModelSelector = () => {
{model.name} {model.name}
</p> </p>
</div> </div>
</PopoverButton> </button>
))} ))}
</div> </div>

View File

@@ -24,7 +24,7 @@ const OptimizationModes = [
}, },
{ {
key: 'quality', key: 'quality',
title: 'Quality (Soon)', title: 'Quality',
description: 'Get the most thorough and accurate answer', description: 'Get the most thorough and accurate answer',
icon: ( icon: (
<Star <Star
@@ -75,13 +75,11 @@ const Optimization = () => {
<PopoverButton <PopoverButton
onClick={() => setOptimizationMode(mode.key)} onClick={() => setOptimizationMode(mode.key)}
key={i} key={i}
disabled={mode.key === 'quality'}
className={cn( className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition focus:outline-none', 'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition focus:outline-none',
optimizationMode === mode.key optimizationMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary' ? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary', : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
mode.key === 'quality' && 'opacity-50 cursor-not-allowed',
)} )}
> >
<div className="flex flex-row items-center space-x-1 text-black dark:text-white"> <div className="flex flex-row items-center space-x-1 text-black dark:text-white">

View File

@@ -6,11 +6,11 @@ import {
Transition, Transition,
TransitionChild, TransitionChild,
} from '@headlessui/react'; } from '@headlessui/react';
import { Document } from '@langchain/core/documents';
import { File } from 'lucide-react'; import { File } from 'lucide-react';
import { Fragment, useState } from 'react'; import { Fragment, useState } from 'react';
import { Chunk } from '@/lib/types';
const MessageSources = ({ sources }: { sources: Document[] }) => { const MessageSources = ({ sources }: { sources: Chunk[] }) => {
const [isDialogOpen, setIsDialogOpen] = useState(false); const [isDialogOpen, setIsDialogOpen] = useState(false);
const closeModal = () => { const closeModal = () => {

View File

@@ -11,6 +11,7 @@ import {
} from '@headlessui/react'; } from '@headlessui/react';
import jsPDF from 'jspdf'; import jsPDF from 'jspdf';
import { useChat, Section } from '@/lib/hooks/useChat'; import { useChat, Section } from '@/lib/hooks/useChat';
import { SourceBlock } from '@/lib/types';
const downloadFile = (filename: string, content: string, type: string) => { const downloadFile = (filename: string, content: string, type: string) => {
const blob = new Blob([content], { type }); const blob = new Blob([content], { type });
@@ -28,35 +29,41 @@ const downloadFile = (filename: string, content: string, type: string) => {
const exportAsMarkdown = (sections: Section[], title: string) => { const exportAsMarkdown = (sections: Section[], title: string) => {
const date = new Date( const date = new Date(
sections[0]?.userMessage?.createdAt || Date.now(), sections[0].message.createdAt || Date.now(),
).toLocaleString(); ).toLocaleString();
let md = `# 💬 Chat Export: ${title}\n\n`; let md = `# 💬 Chat Export: ${title}\n\n`;
md += `*Exported on: ${date}*\n\n---\n`; md += `*Exported on: ${date}*\n\n---\n`;
sections.forEach((section, idx) => { sections.forEach((section, idx) => {
if (section.userMessage) {
md += `\n---\n`; md += `\n---\n`;
md += `**🧑 User** md += `**🧑 User**
`; `;
md += `*${new Date(section.userMessage.createdAt).toLocaleString()}*\n\n`; md += `*${new Date(section.message.createdAt).toLocaleString()}*\n\n`;
md += `> ${section.userMessage.content.replace(/\n/g, '\n> ')}\n`; md += `> ${section.message.query.replace(/\n/g, '\n> ')}\n`;
}
if (section.assistantMessage) { if (section.message.responseBlocks.length > 0) {
md += `\n---\n`; md += `\n---\n`;
md += `**🤖 Assistant** md += `**🤖 Assistant**
`; `;
md += `*${new Date(section.assistantMessage.createdAt).toLocaleString()}*\n\n`; md += `*${new Date(section.message.createdAt).toLocaleString()}*\n\n`;
md += `> ${section.assistantMessage.content.replace(/\n/g, '\n> ')}\n`; md += `> ${section.message.responseBlocks
.filter((b) => b.type === 'text')
.map((block) => block.data)
.join('\n')
.replace(/\n/g, '\n> ')}\n`;
} }
const sourceResponseBlock = section.message.responseBlocks.find(
(block) => block.type === 'source',
) as SourceBlock | undefined;
if ( if (
section.sourceMessage && sourceResponseBlock &&
section.sourceMessage.sources && sourceResponseBlock.data &&
section.sourceMessage.sources.length > 0 sourceResponseBlock.data.length > 0
) { ) {
md += `\n**Citations:**\n`; md += `\n**Citations:**\n`;
section.sourceMessage.sources.forEach((src: any, i: number) => { sourceResponseBlock.data.forEach((src: any, i: number) => {
const url = src.metadata?.url || ''; const url = src.metadata?.url || '';
md += `- [${i + 1}] [${url}](${url})\n`; md += `- [${i + 1}] [${url}](${url})\n`;
}); });
@@ -69,7 +76,7 @@ const exportAsMarkdown = (sections: Section[], title: string) => {
const exportAsPDF = (sections: Section[], title: string) => { const exportAsPDF = (sections: Section[], title: string) => {
const doc = new jsPDF(); const doc = new jsPDF();
const date = new Date( const date = new Date(
sections[0]?.userMessage?.createdAt || Date.now(), sections[0]?.message?.createdAt || Date.now(),
).toLocaleString(); ).toLocaleString();
let y = 15; let y = 15;
const pageHeight = doc.internal.pageSize.height; const pageHeight = doc.internal.pageSize.height;
@@ -86,7 +93,6 @@ const exportAsPDF = (sections: Section[], title: string) => {
doc.setTextColor(30); doc.setTextColor(30);
sections.forEach((section, idx) => { sections.forEach((section, idx) => {
if (section.userMessage) {
if (y > pageHeight - 30) { if (y > pageHeight - 30) {
doc.addPage(); doc.addPage();
y = 15; y = 15;
@@ -96,15 +102,11 @@ const exportAsPDF = (sections: Section[], title: string) => {
doc.setFont('helvetica', 'normal'); doc.setFont('helvetica', 'normal');
doc.setFontSize(10); doc.setFontSize(10);
doc.setTextColor(120); doc.setTextColor(120);
doc.text( doc.text(`${new Date(section.message.createdAt).toLocaleString()}`, 40, y);
`${new Date(section.userMessage.createdAt).toLocaleString()}`,
40,
y,
);
y += 6; y += 6;
doc.setTextColor(30); doc.setTextColor(30);
doc.setFontSize(12); doc.setFontSize(12);
const userLines = doc.splitTextToSize(section.userMessage.content, 180); const userLines = doc.splitTextToSize(section.message.query, 180);
for (let i = 0; i < userLines.length; i++) { for (let i = 0; i < userLines.length; i++) {
if (y > pageHeight - 20) { if (y > pageHeight - 20) {
doc.addPage(); doc.addPage();
@@ -121,9 +123,8 @@ const exportAsPDF = (sections: Section[], title: string) => {
} }
doc.line(10, y, 200, y); doc.line(10, y, 200, y);
y += 4; y += 4;
}
if (section.assistantMessage) { if (section.message.responseBlocks.length > 0) {
if (y > pageHeight - 30) { if (y > pageHeight - 30) {
doc.addPage(); doc.addPage();
y = 15; y = 15;
@@ -134,7 +135,7 @@ const exportAsPDF = (sections: Section[], title: string) => {
doc.setFontSize(10); doc.setFontSize(10);
doc.setTextColor(120); doc.setTextColor(120);
doc.text( doc.text(
`${new Date(section.assistantMessage.createdAt).toLocaleString()}`, `${new Date(section.message.createdAt).toLocaleString()}`,
40, 40,
y, y,
); );
@@ -142,7 +143,7 @@ const exportAsPDF = (sections: Section[], title: string) => {
doc.setTextColor(30); doc.setTextColor(30);
doc.setFontSize(12); doc.setFontSize(12);
const assistantLines = doc.splitTextToSize( const assistantLines = doc.splitTextToSize(
section.assistantMessage.content, section.parsedTextBlocks.join('\n'),
180, 180,
); );
for (let i = 0; i < assistantLines.length; i++) { for (let i = 0; i < assistantLines.length; i++) {
@@ -154,10 +155,14 @@ const exportAsPDF = (sections: Section[], title: string) => {
y += 6; y += 6;
} }
const sourceResponseBlock = section.message.responseBlocks.find(
(block) => block.type === 'source',
) as SourceBlock | undefined;
if ( if (
section.sourceMessage && sourceResponseBlock &&
section.sourceMessage.sources && sourceResponseBlock.data &&
section.sourceMessage.sources.length > 0 sourceResponseBlock.data.length > 0
) { ) {
doc.setFontSize(11); doc.setFontSize(11);
doc.setTextColor(80); doc.setTextColor(80);
@@ -167,7 +172,7 @@ const exportAsPDF = (sections: Section[], title: string) => {
} }
doc.text('Citations:', 12, y); doc.text('Citations:', 12, y);
y += 5; y += 5;
section.sourceMessage.sources.forEach((src: any, i: number) => { sourceResponseBlock.data.forEach((src: any, i: number) => {
const url = src.metadata?.url || ''; const url = src.metadata?.url || '';
if (y > pageHeight - 15) { if (y > pageHeight - 15) {
doc.addPage(); doc.addPage();
@@ -198,15 +203,15 @@ const Navbar = () => {
const { sections, chatId } = useChat(); const { sections, chatId } = useChat();
useEffect(() => { useEffect(() => {
if (sections.length > 0 && sections[0].userMessage) { if (sections.length > 0 && sections[0].message) {
const newTitle = const newTitle =
sections[0].userMessage.content.length > 20 sections[0].message.query.substring(0, 30) + '...' ||
? `${sections[0].userMessage.content.substring(0, 20).trim()}...` 'New Conversation';
: sections[0].userMessage.content;
setTitle(newTitle); setTitle(newTitle);
const newTimeAgo = formatTimeDifference( const newTimeAgo = formatTimeDifference(
new Date(), new Date(),
sections[0].userMessage.createdAt, sections[0].message.createdAt,
); );
setTimeAgo(newTimeAgo); setTimeAgo(newTimeAgo);
} }
@@ -214,10 +219,10 @@ const Navbar = () => {
useEffect(() => { useEffect(() => {
const intervalId = setInterval(() => { const intervalId = setInterval(() => {
if (sections.length > 0 && sections[0].userMessage) { if (sections.length > 0 && sections[0].message) {
const newTimeAgo = formatTimeDifference( const newTimeAgo = formatTimeDifference(
new Date(), new Date(),
sections[0].userMessage.createdAt, sections[0].message.createdAt,
); );
setTimeAgo(newTimeAgo); setTimeAgo(newTimeAgo);
} }

View File

@@ -97,7 +97,7 @@ const AddModel = ({
> >
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg"> <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
<div className="px-6 pt-6 pb-4"> <div className="px-6 pt-6 pb-4">
<h3 className="text-black/90 dark:text-white/90 font-medium"> <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
Add new {type === 'chat' ? 'chat' : 'embedding'} model Add new {type === 'chat' ? 'chat' : 'embedding'} model
</h3> </h3>
</div> </div>
@@ -115,7 +115,7 @@ const AddModel = ({
<input <input
value={modelName} value={modelName}
onChange={(e) => setModelName(e.target.value)} onChange={(e) => setModelName(e.target.value)}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder="e.g., GPT-4" placeholder="e.g., GPT-4"
type="text" type="text"
required required
@@ -128,7 +128,7 @@ const AddModel = ({
<input <input
value={modelKey} value={modelKey}
onChange={(e) => setModelKey(e.target.value)} onChange={(e) => setModelKey(e.target.value)}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder="e.g., gpt-4" placeholder="e.g., gpt-4"
type="text" type="text"
required required
@@ -140,7 +140,7 @@ const AddModel = ({
<button <button
type="submit" type="submit"
disabled={loading} disabled={loading}
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200" className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
> >
{loading ? ( {loading ? (
<Loader2 className="animate-spin" size={16} /> <Loader2 className="animate-spin" size={16} />

View File

@@ -82,10 +82,10 @@ const AddProvider = ({
setProviders((prev) => [...prev, data]); setProviders((prev) => [...prev, data]);
toast.success('Provider added successfully.'); toast.success('Connection added successfully.');
} catch (error) { } catch (error) {
console.error('Error adding provider:', error); console.error('Error adding provider:', error);
toast.error('Failed to add provider.'); toast.error('Failed to add connection.');
} finally { } finally {
setLoading(false); setLoading(false);
setOpen(false); setOpen(false);
@@ -96,10 +96,10 @@ const AddProvider = ({
<> <>
<button <button
onClick={() => setOpen(true)} onClick={() => setOpen(true)}
className="px-3 md:px-4 py-1.5 md:py-2 rounded-lg text-xs sm:text-sm border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200" className="px-3 md:px-4 py-1.5 md:py-2 rounded-lg text-xs sm:text-xs border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
> >
<Plus className="w-3.5 h-3.5 md:w-4 md:h-4" /> <Plus className="w-3.5 h-3.5 md:w-4 md:h-4" />
<span>Add Provider</span> <span>Add Connection</span>
</button> </button>
<AnimatePresence> <AnimatePresence>
{open && ( {open && (
@@ -119,8 +119,8 @@ const AddProvider = ({
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg"> <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
<form onSubmit={handleSubmit} className="flex flex-col flex-1"> <form onSubmit={handleSubmit} className="flex flex-col flex-1">
<div className="px-6 pt-6 pb-4"> <div className="px-6 pt-6 pb-4">
<h3 className="text-black/90 dark:text-white/90 font-medium"> <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
Add new provider Add new connection
</h3> </h3>
</div> </div>
<div className="border-t border-light-200 dark:border-dark-200" /> <div className="border-t border-light-200 dark:border-dark-200" />
@@ -128,7 +128,7 @@ const AddProvider = ({
<div className="flex flex-col space-y-4"> <div className="flex flex-col space-y-4">
<div className="flex flex-col items-start space-y-2"> <div className="flex flex-col items-start space-y-2">
<label className="text-xs text-black/70 dark:text-white/70"> <label className="text-xs text-black/70 dark:text-white/70">
Select provider type Select connection type
</label> </label>
<Select <Select
value={selectedProvider ?? ''} value={selectedProvider ?? ''}
@@ -149,13 +149,13 @@ const AddProvider = ({
className="flex flex-col items-start space-y-2" className="flex flex-col items-start space-y-2"
> >
<label className="text-xs text-black/70 dark:text-white/70"> <label className="text-xs text-black/70 dark:text-white/70">
Name* Connection Name*
</label> </label>
<input <input
value={name} value={name}
onChange={(e) => setName(e.target.value)} onChange={(e) => setName(e.target.value)}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder={'Provider Name'} placeholder={'e.g., My OpenAI Connection'}
type="text" type="text"
required={true} required={true}
/> />
@@ -178,7 +178,7 @@ const AddProvider = ({
[field.key]: event.target.value, [field.key]: event.target.value,
})) }))
} }
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder={ placeholder={
(field as StringUIConfigField).placeholder (field as StringUIConfigField).placeholder
} }
@@ -194,12 +194,12 @@ const AddProvider = ({
<button <button
type="submit" type="submit"
disabled={loading} disabled={loading}
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200" className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
> >
{loading ? ( {loading ? (
<Loader2 className="animate-spin" size={16} /> <Loader2 className="animate-spin" size={16} />
) : ( ) : (
'Add Provider' 'Add Connection'
)} )}
</button> </button>
</div> </div>

View File

@@ -34,10 +34,10 @@ const DeleteProvider = ({
return prev.filter((p) => p.id !== modelProvider.id); return prev.filter((p) => p.id !== modelProvider.id);
}); });
toast.success('Provider deleted successfully.'); toast.success('Connection deleted successfully.');
} catch (error) { } catch (error) {
console.error('Error deleting provider:', error); console.error('Error deleting provider:', error);
toast.error('Failed to delete provider.'); toast.error('Failed to delete connection.');
} finally { } finally {
setLoading(false); setLoading(false);
} }
@@ -51,7 +51,7 @@ const DeleteProvider = ({
setOpen(true); setOpen(true);
}} }}
className="group p-1.5 rounded-md hover:bg-light-200 hover:dark:bg-dark-200 transition-colors group" className="group p-1.5 rounded-md hover:bg-light-200 hover:dark:bg-dark-200 transition-colors group"
title="Delete provider" title="Delete connection"
> >
<Trash2 <Trash2
size={14} size={14}
@@ -76,14 +76,15 @@ const DeleteProvider = ({
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg"> <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
<div className="px-6 pt-6 pb-4"> <div className="px-6 pt-6 pb-4">
<h3 className="text-black/90 dark:text-white/90 font-medium"> <h3 className="text-black/90 dark:text-white/90 font-medium">
Delete provider Delete connection
</h3> </h3>
</div> </div>
<div className="border-t border-light-200 dark:border-dark-200" /> <div className="border-t border-light-200 dark:border-dark-200" />
<div className="flex-1 overflow-y-auto px-6 py-4"> <div className="flex-1 overflow-y-auto px-6 py-4">
<p className="text-SM text-black/60 dark:text-white/60"> <p className="text-sm text-black/60 dark:text-white/60">
Are you sure you want to delete the provider &quot; Are you sure you want to delete the connection &quot;
{modelProvider.name}&quot;? This action cannot be undone. {modelProvider.name}&quot;? This action cannot be undone.
All associated models will also be removed.
</p> </p>
</div> </div>
<div className="px-6 py-6 flex justify-end space-x-2"> <div className="px-6 py-6 flex justify-end space-x-2">

View File

@@ -1,7 +1,7 @@
import { UIConfigField, ConfigModelProvider } from '@/lib/config/types'; import { UIConfigField, ConfigModelProvider } from '@/lib/config/types';
import { cn } from '@/lib/utils'; import { cn } from '@/lib/utils';
import { AnimatePresence, motion } from 'framer-motion'; import { AnimatePresence, motion } from 'framer-motion';
import { AlertCircle, ChevronDown, Pencil, Trash2, X } from 'lucide-react'; import { AlertCircle, Plug2, Plus, Pencil, Trash2, X } from 'lucide-react';
import { useState } from 'react'; import { useState } from 'react';
import { toast } from 'sonner'; import { toast } from 'sonner';
import AddModel from './AddModelDialog'; import AddModel from './AddModelDialog';
@@ -17,7 +17,7 @@ const ModelProvider = ({
fields: UIConfigField[]; fields: UIConfigField[];
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>; setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
}) => { }) => {
const [open, setOpen] = useState(false); const [open, setOpen] = useState(true);
const handleModelDelete = async ( const handleModelDelete = async (
type: 'chat' | 'embedding', type: 'chat' | 'embedding',
@@ -66,23 +66,35 @@ const ModelProvider = ({
} }
}; };
const modelCount =
modelProvider.chatModels.filter((m) => m.key !== 'error').length +
modelProvider.embeddingModels.filter((m) => m.key !== 'error').length;
const hasError =
modelProvider.chatModels.some((m) => m.key === 'error') ||
modelProvider.embeddingModels.some((m) => m.key === 'error');
return ( return (
<div <div
key={modelProvider.id} key={modelProvider.id}
className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden" className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden bg-light-primary dark:bg-dark-primary"
> >
<div <div className="px-5 py-3.5 flex flex-row justify-between w-full items-center border-b border-light-200 dark:border-dark-200 bg-light-secondary/30 dark:bg-dark-secondary/30">
className={cn( <div className="flex items-center gap-2.5">
'group px-5 py-4 flex flex-row justify-between w-full cursor-pointer hover:bg-light-secondary hover:dark:bg-dark-secondary transition duration-200 items-center', <div className="p-1.5 rounded-md bg-sky-500/10 dark:bg-sky-500/10">
!open && 'rounded-lg', <Plug2 size={14} className="text-sky-500" />
)} </div>
onClick={() => setOpen(!open)} <div className="flex flex-col">
> <p className="text-sm lg:text-sm text-black dark:text-white font-medium">
<p className="text-black dark:text-white font-medium">
{modelProvider.name} {modelProvider.name}
</p> </p>
<div className="flex items-center gap-4"> {modelCount > 0 && (
<div className="flex flex-row items-center"> <p className="text-[10px] lg:text-[11px] text-black/50 dark:text-white/50">
{modelCount} model{modelCount !== 1 ? 's' : ''} configured
</p>
)}
</div>
</div>
<div className="flex flex-row items-center gap-1">
<UpdateProvider <UpdateProvider
fields={fields} fields={fields}
modelProvider={modelProvider} modelProvider={modelProvider}
@@ -93,123 +105,118 @@ const ModelProvider = ({
setProviders={setProviders} setProviders={setProviders}
/> />
</div> </div>
<ChevronDown
size={16}
className={cn(
open ? 'rotate-180' : '',
'transition duration-200 text-black/70 dark:text-white/70 group-hover:text-sky-500',
)}
/>
</div> </div>
</div>
<AnimatePresence>
{open && (
<motion.div
initial={{ height: 0, opacity: 0 }}
animate={{ height: 'auto', opacity: 1 }}
exit={{ height: 0, opacity: 0 }}
transition={{ duration: 0.1 }}
>
<div className="border-t border-light-200 dark:border-dark-200" />
<div className="flex flex-col gap-y-4 px-5 py-4"> <div className="flex flex-col gap-y-4 px-5 py-4">
{modelProvider.chatModels.length > 0 && (
<div className="flex flex-col gap-y-2"> <div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center"> <div className="flex flex-row w-full justify-between items-center">
<p className="text-xs text-black/70 dark:text-white/70"> <p className="text-[11px] lg:text-[11px] font-medium text-black/70 dark:text-white/70 uppercase tracking-wide">
Chat models Chat Models
</p> </p>
{!modelProvider.chatModels.some((m) => m.key === 'error') && (
<AddModel <AddModel
providerId={modelProvider.id} providerId={modelProvider.id}
setProviders={setProviders} setProviders={setProviders}
type="chat" type="chat"
/> />
)}
</div> </div>
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
{modelProvider.chatModels.some((m) => m.key === 'error') ? ( {modelProvider.chatModels.some((m) => m.key === 'error') ? (
<div className="flex flex-row items-center gap-2 text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30"> <div className="flex flex-row items-center gap-2 text-xs lg:text-xs text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" /> <AlertCircle size={16} className="shrink-0" />
<span className="break-words"> <span className="break-words">
{ {
modelProvider.chatModels.find( modelProvider.chatModels.find((m) => m.key === 'error')
(m) => m.key === 'error', ?.name
)?.name
} }
</span> </span>
</div> </div>
) : ( ) : modelProvider.chatModels.filter((m) => m.key !== 'error')
.length === 0 && !hasError ? (
<div className="flex flex-col items-center justify-center py-4 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/20 dark:bg-dark-secondary/20">
<p className="text-xs text-black/50 dark:text-white/50 text-center">
No chat models configured
</p>
</div>
) : modelProvider.chatModels.filter((m) => m.key !== 'error')
.length > 0 ? (
<div className="flex flex-row flex-wrap gap-2"> <div className="flex flex-row flex-wrap gap-2">
{modelProvider.chatModels.map((model, index) => ( {modelProvider.chatModels.map((model, index) => (
<div <div
key={`${modelProvider.id}-chat-${model.key}-${index}`} key={`${modelProvider.id}-chat-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5" className="flex flex-row items-center space-x-1.5 text-xs lg:text-xs text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5 border border-light-200 dark:border-dark-200"
> >
<span>{model.name}</span> <span>{model.name}</span>
<button <button
onClick={() => { onClick={() => {
handleModelDelete('chat', model.key); handleModelDelete('chat', model.key);
}} }}
className="hover:text-red-500 dark:hover:text-red-400 transition-colors"
> >
<X size={12} /> <X size={12} />
</button> </button>
</div> </div>
))} ))}
</div> </div>
)} ) : null}
</div> </div>
</div> </div>
)}
{modelProvider.embeddingModels.length > 0 && (
<div className="flex flex-col gap-y-2"> <div className="flex flex-col gap-y-2">
<div className="flex flex-row w-full justify-between items-center"> <div className="flex flex-row w-full justify-between items-center">
<p className="text-xs text-black/70 dark:text-white/70"> <p className="text-[11px] lg:text-[11px] font-medium text-black/70 dark:text-white/70 uppercase tracking-wide">
Embedding models Embedding Models
</p> </p>
{!modelProvider.embeddingModels.some((m) => m.key === 'error') && (
<AddModel <AddModel
providerId={modelProvider.id} providerId={modelProvider.id}
setProviders={setProviders} setProviders={setProviders}
type="embedding" type="embedding"
/> />
)}
</div> </div>
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
{modelProvider.embeddingModels.some( {modelProvider.embeddingModels.some((m) => m.key === 'error') ? (
(m) => m.key === 'error', <div className="flex flex-row items-center gap-2 text-xs lg:text-xs text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
) ? (
<div className="flex flex-row items-center gap-2 text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
<AlertCircle size={16} className="shrink-0" /> <AlertCircle size={16} className="shrink-0" />
<span className="break-words"> <span className="break-words">
{ {
modelProvider.embeddingModels.find( modelProvider.embeddingModels.find((m) => m.key === 'error')
(m) => m.key === 'error', ?.name
)?.name
} }
</span> </span>
</div> </div>
) : ( ) : modelProvider.embeddingModels.filter((m) => m.key !== 'error')
.length === 0 && !hasError ? (
<div className="flex flex-col items-center justify-center py-4 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/20 dark:bg-dark-secondary/20">
<p className="text-xs text-black/50 dark:text-white/50 text-center">
No embedding models configured
</p>
</div>
) : modelProvider.embeddingModels.filter((m) => m.key !== 'error')
.length > 0 ? (
<div className="flex flex-row flex-wrap gap-2"> <div className="flex flex-row flex-wrap gap-2">
{modelProvider.embeddingModels.map((model, index) => ( {modelProvider.embeddingModels.map((model, index) => (
<div <div
key={`${modelProvider.id}-embedding-${model.key}-${index}`} key={`${modelProvider.id}-embedding-${model.key}-${index}`}
className="flex flex-row items-center space-x-1 text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5" className="flex flex-row items-center space-x-1.5 text-xs lg:text-xs text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5 border border-light-200 dark:border-dark-200"
> >
<span>{model.name}</span> <span>{model.name}</span>
<button <button
onClick={() => { onClick={() => {
handleModelDelete('embedding', model.key); handleModelDelete('embedding', model.key);
}} }}
className="hover:text-red-500 dark:hover:text-red-400 transition-colors"
> >
<X size={12} /> <X size={12} />
</button> </button>
</div> </div>
))} ))}
</div> </div>
)} ) : null}
</div> </div>
</div> </div>
)}
</div> </div>
</motion.div>
)}
</AnimatePresence>
</div> </div>
); );
}; };

View File

@@ -1,5 +1,6 @@
import Select from '@/components/ui/Select'; import Select from '@/components/ui/Select';
import { ConfigModelProvider } from '@/lib/config/types'; import { ConfigModelProvider } from '@/lib/config/types';
import { useChat } from '@/lib/hooks/useChat';
import { useState } from 'react'; import { useState } from 'react';
import { toast } from 'sonner'; import { toast } from 'sonner';
@@ -11,23 +12,40 @@ const ModelSelect = ({
type: 'chat' | 'embedding'; type: 'chat' | 'embedding';
}) => { }) => {
const [selectedModel, setSelectedModel] = useState<string>( const [selectedModel, setSelectedModel] = useState<string>(
`${providers[0]?.id}/${providers[0].embeddingModels[0]?.key}`, type === 'chat'
? `${localStorage.getItem('chatModelProviderId')}/${localStorage.getItem('chatModelKey')}`
: `${localStorage.getItem('embeddingModelProviderId')}/${localStorage.getItem('embeddingModelKey')}`,
); );
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const { setChatModelProvider, setEmbeddingModelProvider } = useChat();
const handleSave = async (newValue: string) => { const handleSave = async (newValue: string) => {
setLoading(true); setLoading(true);
setSelectedModel(newValue); setSelectedModel(newValue);
try { try {
if (type === 'chat') { if (type === 'chat') {
localStorage.setItem('chatModelProviderId', newValue.split('/')[0]); const providerId = newValue.split('/')[0];
localStorage.setItem('chatModelKey', newValue.split('/')[1]); const modelKey = newValue.split('/').slice(1).join('/');
localStorage.setItem('chatModelProviderId', providerId);
localStorage.setItem('chatModelKey', modelKey);
setChatModelProvider({
providerId: providerId,
key: modelKey,
});
} else { } else {
localStorage.setItem( const providerId = newValue.split('/')[0];
'embeddingModelProviderId', const modelKey = newValue.split('/').slice(1).join('/');
newValue.split('/')[0],
); localStorage.setItem('embeddingModelProviderId', providerId);
localStorage.setItem('embeddingModelKey', newValue.split('/')[1]); localStorage.setItem('embeddingModelKey', modelKey);
setEmbeddingModelProvider({
providerId: providerId,
key: modelKey,
});
} }
} catch (error) { } catch (error) {
console.error('Error saving config:', error); console.error('Error saving config:', error);
@@ -38,16 +56,16 @@ const ModelSelect = ({
}; };
return ( return (
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80"> <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
<div className="space-y-5"> <div className="space-y-3 lg:space-y-5">
<div> <div>
<h4 className="text-base text-black dark:text-white"> <h4 className="text-sm lg:text-sm text-black dark:text-white">
Select {type === 'chat' ? 'Chat Model' : 'Embedding Model'} Select {type === 'chat' ? 'Chat Model' : 'Embedding Model'}
</h4> </h4>
<p className="text-xs text-black/50 dark:text-white/50"> <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
{type === 'chat' {type === 'chat'
? 'Select the model to use for chat responses' ? 'Choose which model to use for generating responses'
: 'Select the model to use for embeddings'} : 'Choose which model to use for generating embeddings'}
</p> </p>
</div> </div>
<Select <Select
@@ -68,7 +86,7 @@ const ModelSelect = ({
})), })),
) )
} }
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60 cursor-pointer capitalize pr-12" className="!text-xs lg:!text-[13px]"
loading={loading} loading={loading}
disabled={loading} disabled={loading}
/> />

View File

@@ -20,20 +20,58 @@ const Models = ({
return ( return (
<div className="flex-1 space-y-6 overflow-y-auto py-6"> <div className="flex-1 space-y-6 overflow-y-auto py-6">
<div className="flex flex-col px-6 gap-y-4"> <div className="flex flex-col px-6 gap-y-4">
<h3 className="text-sm text-black/70 dark:text-white/70"> <h3 className="text-xs lg:text-xs text-black/70 dark:text-white/70">
Select models Select models
</h3> </h3>
<ModelSelect providers={values} type="embedding" /> <ModelSelect
providers={values.filter((p) =>
p.chatModels.some((m) => m.key != 'error'),
)}
type="chat"
/>
<ModelSelect
providers={values.filter((p) =>
p.embeddingModels.some((m) => m.key != 'error'),
)}
type="embedding"
/>
</div> </div>
<div className="border-t border-light-200 dark:border-dark-200" /> <div className="border-t border-light-200 dark:border-dark-200" />
<div className="flex flex-row justify-between items-center px-6 "> <div className="flex flex-row justify-between items-center px-6 ">
<p className="text-sm text-black/70 dark:text-white/70"> <p className="text-xs lg:text-xs text-black/70 dark:text-white/70">
Manage model provider Manage connections
</p> </p>
<AddProvider modelProviders={fields} setProviders={setProviders} /> <AddProvider modelProviders={fields} setProviders={setProviders} />
</div> </div>
<div className="flex flex-col px-6 gap-y-4"> <div className="flex flex-col px-6 gap-y-4">
{providers.map((provider) => ( {providers.length === 0 ? (
<div className="flex flex-col items-center justify-center py-12 px-4 rounded-lg border-2 border-dashed border-light-200 dark:border-dark-200 bg-light-secondary/10 dark:bg-dark-secondary/10">
<div className="p-3 rounded-full bg-sky-500/10 dark:bg-sky-500/10 mb-3">
<svg
xmlns="http://www.w3.org/2000/svg"
className="w-8 h-8 text-sky-500"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M13 10V3L4 14h7v7l9-11h-7z"
/>
</svg>
</div>
<p className="text-sm font-medium text-black/70 dark:text-white/70 mb-1">
No connections yet
</p>
<p className="text-xs text-black/50 dark:text-white/50 text-center max-w-sm mb-4">
Add your first connection to start using AI models. Connect to
OpenAI, Anthropic, Ollama, and more.
</p>
</div>
) : (
providers.map((provider) => (
<ModelProvider <ModelProvider
key={`provider-${provider.id}`} key={`provider-${provider.id}`}
fields={ fields={
@@ -43,7 +81,8 @@ const Models = ({
modelProvider={provider} modelProvider={provider}
setProviders={setProviders} setProviders={setProviders}
/> />
))} ))
)}
</div> </div>
</div> </div>
); );

View File

@@ -67,10 +67,10 @@ const UpdateProvider = ({
}); });
}); });
toast.success('Provider updated successfully.'); toast.success('Connection updated successfully.');
} catch (error) { } catch (error) {
console.error('Error updating provider:', error); console.error('Error updating provider:', error);
toast.error('Failed to update provider.'); toast.error('Failed to update connection.');
} finally { } finally {
setLoading(false); setLoading(false);
setOpen(false); setOpen(false);
@@ -109,8 +109,8 @@ const UpdateProvider = ({
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg"> <DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
<form onSubmit={handleSubmit} className="flex flex-col flex-1"> <form onSubmit={handleSubmit} className="flex flex-col flex-1">
<div className="px-6 pt-6 pb-4"> <div className="px-6 pt-6 pb-4">
<h3 className="text-black/90 dark:text-white/90 font-medium"> <h3 className="text-black/90 dark:text-white/90 font-medium text-sm">
Update provider Update connection
</h3> </h3>
</div> </div>
<div className="border-t border-light-200 dark:border-dark-200" /> <div className="border-t border-light-200 dark:border-dark-200" />
@@ -121,13 +121,13 @@ const UpdateProvider = ({
className="flex flex-col items-start space-y-2" className="flex flex-col items-start space-y-2"
> >
<label className="text-xs text-black/70 dark:text-white/70"> <label className="text-xs text-black/70 dark:text-white/70">
Name* Connection Name*
</label> </label>
<input <input
value={name} value={name}
onChange={(event) => setName(event.target.value)} onChange={(event) => setName(event.target.value)}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder={'Provider Name'} placeholder={'Connection Name'}
type="text" type="text"
required={true} required={true}
/> />
@@ -150,7 +150,7 @@ const UpdateProvider = ({
[field.key]: event.target.value, [field.key]: event.target.value,
})) }))
} }
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder={ placeholder={
(field as StringUIConfigField).placeholder (field as StringUIConfigField).placeholder
} }
@@ -166,12 +166,12 @@ const UpdateProvider = ({
<button <button
type="submit" type="submit"
disabled={loading} disabled={loading}
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200" className="px-4 py-2 rounded-lg text-[13px] bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
> >
{loading ? ( {loading ? (
<Loader2 className="animate-spin" size={16} /> <Loader2 className="animate-spin" size={16} />
) : ( ) : (
'Update Provider' 'Update Connection'
)} )}
</button> </button>
</div> </div>

View File

@@ -0,0 +1,29 @@
import { UIConfigField } from '@/lib/config/types';
import SettingsField from '../SettingsField';
const Personalization = ({
fields,
values,
}: {
fields: UIConfigField[];
values: Record<string, any>;
}) => {
return (
<div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
{fields.map((field) => (
<SettingsField
key={field.key}
field={field}
value={
(field.scope === 'client'
? localStorage.getItem(field.key)
: values[field.key]) ?? field.default
}
dataAdd="personalization"
/>
))}
</div>
);
};
export default Personalization;

View File

@@ -1,7 +1,7 @@
import { UIConfigField } from '@/lib/config/types'; import { UIConfigField } from '@/lib/config/types';
import SettingsField from '../SettingsField'; import SettingsField from '../SettingsField';
const General = ({ const Preferences = ({
fields, fields,
values, values,
}: { }: {
@@ -19,11 +19,11 @@ const General = ({
? localStorage.getItem(field.key) ? localStorage.getItem(field.key)
: values[field.key]) ?? field.default : values[field.key]) ?? field.default
} }
dataAdd="general" dataAdd="preferences"
/> />
))} ))}
</div> </div>
); );
}; };
export default General; export default Preferences;

View File

@@ -0,0 +1,21 @@
import { Settings } from 'lucide-react';
import { useState } from 'react';
import SettingsDialogue from './SettingsDialogue';
import { AnimatePresence } from 'framer-motion';
const SettingsButtonMobile = () => {
const [isOpen, setIsOpen] = useState<boolean>(false);
return (
<>
<button className="lg:hidden" onClick={() => setIsOpen(true)}>
<Settings size={18} />
</button>
<AnimatePresence>
{isOpen && <SettingsDialogue isOpen={isOpen} setIsOpen={setIsOpen} />}
</AnimatePresence>
</>
);
};
export default SettingsButtonMobile;

View File

@@ -1,6 +1,13 @@
import { Dialog, DialogPanel } from '@headlessui/react'; import { Dialog, DialogPanel } from '@headlessui/react';
import { BrainCog, ChevronLeft, Search, Settings } from 'lucide-react'; import {
import General from './Sections/General'; ArrowLeft,
BrainCog,
ChevronLeft,
Search,
Sliders,
ToggleRight,
} from 'lucide-react';
import Preferences from './Sections/Preferences';
import { motion } from 'framer-motion'; import { motion } from 'framer-motion';
import { useEffect, useState } from 'react'; import { useEffect, useState } from 'react';
import { toast } from 'sonner'; import { toast } from 'sonner';
@@ -8,23 +15,36 @@ import Loader from '../ui/Loader';
import { cn } from '@/lib/utils'; import { cn } from '@/lib/utils';
import Models from './Sections/Models/Section'; import Models from './Sections/Models/Section';
import SearchSection from './Sections/Search'; import SearchSection from './Sections/Search';
import Select from '@/components/ui/Select';
import Personalization from './Sections/Personalization';
const sections = [ const sections = [
{ {
name: 'General', key: 'preferences',
description: 'Adjust common settings.', name: 'Preferences',
icon: Settings, description: 'Customize your application preferences.',
component: General, icon: Sliders,
dataAdd: 'general', component: Preferences,
dataAdd: 'preferences',
}, },
{ {
key: 'personalization',
name: 'Personalization',
description: 'Customize the behavior and tone of the model.',
icon: ToggleRight,
component: Personalization,
dataAdd: 'personalization',
},
{
key: 'models',
name: 'Models', name: 'Models',
description: 'Configure model settings.', description: 'Connect to AI services and manage connections.',
icon: BrainCog, icon: BrainCog,
component: Models, component: Models,
dataAdd: 'modelProviders', dataAdd: 'modelProviders',
}, },
{ {
key: 'search',
name: 'Search', name: 'Search',
description: 'Manage search settings.', description: 'Manage search settings.',
icon: Search, icon: Search,
@@ -42,7 +62,12 @@ const SettingsDialogue = ({
}) => { }) => {
const [isLoading, setIsLoading] = useState(true); const [isLoading, setIsLoading] = useState(true);
const [config, setConfig] = useState<any>(null); const [config, setConfig] = useState<any>(null);
const [activeSection, setActiveSection] = useState(sections[0]); const [activeSection, setActiveSection] = useState<string>(sections[0].key);
const [selectedSection, setSelectedSection] = useState(sections[0]);
useEffect(() => {
setSelectedSection(sections.find((s) => s.key === activeSection)!);
}, [activeSection]);
useEffect(() => { useEffect(() => {
if (isOpen) { if (isOpen) {
@@ -83,14 +108,14 @@ const SettingsDialogue = ({
transition={{ duration: 0.1 }} transition={{ duration: 0.1 }}
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm h-screen" className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm h-screen"
> >
<DialogPanel className="space-y-4 border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary backdrop-blur-lg rounded-xl h-[calc(100vh-2%)] w-[calc(100vw-2%)] md:h-[calc(100vh-7%)] md:w-[calc(100vw-7%)] lg:h-[calc(100vh-20%)] lg:w-[calc(100vw-30%)]"> <DialogPanel className="space-y-4 border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary backdrop-blur-lg rounded-xl h-[calc(100vh-2%)] w-[calc(100vw-2%)] md:h-[calc(100vh-7%)] md:w-[calc(100vw-7%)] lg:h-[calc(100vh-20%)] lg:w-[calc(100vw-30%)] overflow-hidden flex flex-col">
{isLoading ? ( {isLoading ? (
<div className="flex items-center justify-center h-full w-full"> <div className="flex items-center justify-center h-full w-full">
<Loader /> <Loader />
</div> </div>
) : ( ) : (
<div className="flex flex-1 inset-0 h-full"> <div className="flex flex-1 inset-0 h-full overflow-hidden">
<div className="w-[240px] border-r border-white-200 dark:border-dark-200 h-full px-3 pt-3 flex flex-col"> <div className="hidden lg:flex flex-col w-[240px] border-r border-white-200 dark:border-dark-200 h-full px-3 pt-3 overflow-y-auto">
<button <button
onClick={() => setIsOpen(false)} onClick={() => setIsOpen(false)}
className="group flex flex-row items-center hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg" className="group flex flex-row items-center hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg"
@@ -109,11 +134,11 @@ const SettingsDialogue = ({
key={section.dataAdd} key={section.dataAdd}
className={cn( className={cn(
`flex flex-row items-center space-x-2 px-2 py-1.5 rounded-lg w-full text-sm hover:bg-light-200 hover:dark:bg-dark-200 transition duration-200 active:scale-95`, `flex flex-row items-center space-x-2 px-2 py-1.5 rounded-lg w-full text-sm hover:bg-light-200 hover:dark:bg-dark-200 transition duration-200 active:scale-95`,
activeSection.name === section.name activeSection === section.key
? 'bg-light-200 dark:bg-dark-200 text-black/90 dark:text-white/90' ? 'bg-light-200 dark:bg-dark-200 text-black/90 dark:text-white/90'
: ' text-black/70 dark:text-white/70', : ' text-black/70 dark:text-white/70',
)} )}
onClick={() => setActiveSection(section)} onClick={() => setActiveSection(section.key)}
> >
<section.icon size={17} /> <section.icon size={17} />
<p>{section.name}</p> <p>{section.name}</p>
@@ -121,24 +146,51 @@ const SettingsDialogue = ({
))} ))}
</div> </div>
</div> </div>
<div className="w-full"> <div className="w-full flex flex-col overflow-hidden">
{activeSection.component && ( <div className="flex flex-row lg:hidden w-full justify-between px-[20px] my-4 flex-shrink-0">
<div className="flex h-full flex-col"> <button
<div className="border-b border-light-200/60 px-6 pb-6 pt-8 dark:border-dark-200/60"> onClick={() => setIsOpen(false)}
className="group flex flex-row items-center hover:bg-light-200 hover:dark:bg-dark-200 rounded-lg mr-[40%]"
>
<ArrowLeft
size={18}
className="text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70"
/>
</button>
<Select
options={sections.map((section) => {
return {
value: section.key,
key: section.key,
label: section.name,
};
})}
value={activeSection}
onChange={(e) => {
setActiveSection(e.target.value);
}}
className="!text-xs lg:!text-sm"
/>
</div>
{selectedSection.component && (
<div className="flex flex-1 flex-col overflow-hidden">
<div className="border-b border-light-200/60 px-6 pb-6 lg:pt-6 dark:border-dark-200/60 flex-shrink-0">
<div className="flex flex-col"> <div className="flex flex-col">
<h4 className="font-medium text-black dark:text-white"> <h4 className="font-medium text-black dark:text-white text-sm lg:text-sm">
{activeSection.name} {selectedSection.name}
</h4> </h4>
<p className="text-xs text-black/50 dark:text-white/50"> <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
{activeSection.description} {selectedSection.description}
</p> </p>
</div> </div>
</div> </div>
<activeSection.component <div className="flex-1 overflow-y-auto">
fields={config.fields[activeSection.dataAdd]} <selectedSection.component
values={config.values[activeSection.dataAdd]} fields={config.fields[selectedSection.dataAdd]}
values={config.values[selectedSection.dataAdd]}
/> />
</div> </div>
</div>
)} )}
</div> </div>
</div> </div>

View File

@@ -1,6 +1,8 @@
import { import {
SelectUIConfigField, SelectUIConfigField,
StringUIConfigField, StringUIConfigField,
SwitchUIConfigField,
TextareaUIConfigField,
UIConfigField, UIConfigField,
} from '@/lib/config/types'; } from '@/lib/config/types';
import { useState } from 'react'; import { useState } from 'react';
@@ -8,6 +10,7 @@ import Select from '../ui/Select';
import { toast } from 'sonner'; import { toast } from 'sonner';
import { useTheme } from 'next-themes'; import { useTheme } from 'next-themes';
import { Loader2 } from 'lucide-react'; import { Loader2 } from 'lucide-react';
import { Switch } from '@headlessui/react';
const SettingsSelect = ({ const SettingsSelect = ({
field, field,
@@ -58,11 +61,13 @@ const SettingsSelect = ({
}; };
return ( return (
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80"> <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
<div className="space-y-5"> <div className="space-y-3 lg:space-y-5">
<div> <div>
<h4 className="text-base text-black dark:text-white">{field.name}</h4> <h4 className="text-sm lg:text-sm text-black dark:text-white">
<p className="text-xs text-black/50 dark:text-white/50"> {field.name}
</h4>
<p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
{field.description} {field.description}
</p> </p>
</div> </div>
@@ -73,7 +78,7 @@ const SettingsSelect = ({
value: option.value, value: option.value,
label: option.name, label: option.name,
}))} }))}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60 cursor-pointer capitalize pr-12" className="!text-xs lg:!text-sm"
loading={loading} loading={loading}
disabled={loading} disabled={loading}
/> />
@@ -127,11 +132,13 @@ const SettingsInput = ({
}; };
return ( return (
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80"> <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
<div className="space-y-5"> <div className="space-y-3 lg:space-y-5">
<div> <div>
<h4 className="text-base text-black dark:text-white">{field.name}</h4> <h4 className="text-sm lg:text-sm text-black dark:text-white">
<p className="text-xs text-black/50 dark:text-white/50"> {field.name}
</h4>
<p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
{field.description} {field.description}
</p> </p>
</div> </div>
@@ -140,7 +147,7 @@ const SettingsInput = ({
value={value ?? field.default ?? ''} value={value ?? field.default ?? ''}
onChange={(event) => setValue(event.target.value)} onChange={(event) => setValue(event.target.value)}
onBlur={(event) => handleSave(event.target.value)} onBlur={(event) => handleSave(event.target.value)}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60" className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
placeholder={field.placeholder} placeholder={field.placeholder}
type="text" type="text"
disabled={loading} disabled={loading}
@@ -156,6 +163,155 @@ const SettingsInput = ({
); );
}; };
/**
 * Settings card with a multiline <textarea> for a single config field.
 *
 * Edits update parent state on every keystroke via `setValue`; the value is
 * persisted only when the textarea loses focus (onBlur -> handleSave).
 */
const SettingsTextarea = ({
  field,
  value,
  setValue,
  dataAdd,
}: {
  field: TextareaUIConfigField;
  value?: any;
  setValue: (value: any) => void;
  // Config-section prefix; server-scoped keys are saved as `${dataAdd}.${field.key}`.
  dataAdd: string;
}) => {
  // True while a save is in flight; disables the textarea and shows a spinner.
  const [loading, setLoading] = useState(false);

  // Persist `newValue` according to the field's scope:
  // - 'client': written to localStorage under the bare field key
  // - otherwise: POSTed to /api/config under the section-prefixed key
  const handleSave = async (newValue: any) => {
    setLoading(true);
    setValue(newValue);
    try {
      if (field.scope === 'client') {
        localStorage.setItem(field.key, newValue);
      } else {
        const res = await fetch('/api/config', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            key: `${dataAdd}.${field.key}`,
            value: newValue,
          }),
        });

        if (!res.ok) {
          console.error('Failed to save config:', await res.text());
          throw new Error('Failed to save configuration');
        }
      }
    } catch (error) {
      console.error('Error saving config:', error);
      toast.error('Failed to save configuration.');
    } finally {
      // Clears the loading flag after a short delay — presumably to avoid
      // spinner flicker on very fast saves.
      setTimeout(() => setLoading(false), 150);
    }
  };

  return (
    <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
      <div className="space-y-3 lg:space-y-5">
        <div>
          <h4 className="text-sm lg:text-sm text-black dark:text-white">
            {field.name}
          </h4>
          <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
            {field.description}
          </p>
        </div>
        <div className="relative">
          <textarea
            value={value ?? field.default ?? ''}
            onChange={(event) => setValue(event.target.value)}
            onBlur={(event) => handleSave(event.target.value)}
            className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-3 py-2 lg:px-4 lg:py-3 pr-10 !text-xs lg:!text-[13px] text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
            placeholder={field.placeholder}
            rows={4}
            disabled={loading}
          />
          {/* Inline spinner shown while handleSave is pending. */}
          {loading && (
            <span className="pointer-events-none absolute right-3 translate-y-3 text-black/40 dark:text-white/40">
              <Loader2 className="h-4 w-4 animate-spin" />
            </span>
          )}
        </div>
      </div>
    </section>
  );
};
/**
 * Settings card with an on/off switch for a boolean config field.
 *
 * Unlike the text/textarea fields, the switch persists immediately on toggle
 * (onChange -> handleSave); there is no separate blur step.
 */
const SettingsSwitch = ({
  field,
  value,
  setValue,
  dataAdd,
}: {
  field: SwitchUIConfigField;
  value?: any;
  setValue: (value: any) => void;
  // Config-section prefix; server-scoped keys are saved as `${dataAdd}.${field.key}`.
  dataAdd: string;
}) => {
  // True while a save is in flight; disables the switch.
  const [loading, setLoading] = useState(false);

  // Persist the toggled value: localStorage (stringified) for client-scoped
  // fields, otherwise POST to /api/config under the section-prefixed key.
  const handleSave = async (newValue: boolean) => {
    setLoading(true);
    setValue(newValue);
    try {
      if (field.scope === 'client') {
        localStorage.setItem(field.key, String(newValue));
      } else {
        const res = await fetch('/api/config', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            key: `${dataAdd}.${field.key}`,
            value: newValue,
          }),
        });

        if (!res.ok) {
          console.error('Failed to save config:', await res.text());
          throw new Error('Failed to save configuration');
        }
      }
    } catch (error) {
      console.error('Error saving config:', error);
      toast.error('Failed to save configuration.');
    } finally {
      // Clears the loading flag after a short delay — presumably to avoid
      // a visible disabled-state flicker on very fast saves.
      setTimeout(() => setLoading(false), 150);
    }
  };

  // Accept both a real boolean and the string 'true': client-scoped values
  // round-trip through localStorage as strings (see handleSave above).
  const isChecked = value === true || value === 'true';

  return (
    <section className="rounded-xl border border-light-200 bg-light-primary/80 p-4 lg:p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
      <div className="flex flex-row items-center space-x-3 lg:space-x-5 w-full justify-between">
        <div>
          <h4 className="text-sm lg:text-sm text-black dark:text-white">
            {field.name}
          </h4>
          <p className="text-[11px] lg:text-xs text-black/50 dark:text-white/50">
            {field.description}
          </p>
        </div>
        <Switch
          checked={isChecked}
          onChange={handleSave}
          disabled={loading}
          className="group relative flex h-6 w-12 shrink-0 cursor-pointer rounded-full bg-white/10 p-1 duration-200 ease-in-out focus:outline-none transition-colors disabled:opacity-60 disabled:cursor-not-allowed data-[checked]:bg-sky-500"
        >
          <span
            aria-hidden="true"
            className="pointer-events-none inline-block size-4 translate-x-0 rounded-full bg-white shadow-lg ring-0 transition duration-200 ease-in-out group-data-[checked]:translate-x-6"
          />
        </Switch>
      </div>
    </section>
  );
};
const SettingsField = ({ const SettingsField = ({
field, field,
value, value,
@@ -186,6 +342,24 @@ const SettingsField = ({
dataAdd={dataAdd} dataAdd={dataAdd}
/> />
); );
case 'textarea':
return (
<SettingsTextarea
field={field}
value={val}
setValue={setVal}
dataAdd={dataAdd}
/>
);
case 'switch':
return (
<SettingsSwitch
field={field}
value={val}
setValue={setVal}
dataAdd={dataAdd}
/>
);
default: default:
return <div>Unsupported field type: {field.type}</div>; return <div>Unsupported field type: {field.type}</div>;
} }

View File

@@ -9,6 +9,7 @@ import { useEffect, useState } from 'react';
import { toast } from 'sonner'; import { toast } from 'sonner';
import AddProvider from '../Settings/Sections/Models/AddProviderDialog'; import AddProvider from '../Settings/Sections/Models/AddProviderDialog';
import ModelProvider from '../Settings/Sections/Models/ModelProvider'; import ModelProvider from '../Settings/Sections/Models/ModelProvider';
import ModelSelect from '@/components/Settings/Sections/Models/ModelSelect';
const SetupConfig = ({ const SetupConfig = ({
configSections, configSections,
@@ -62,7 +63,11 @@ const SetupConfig = ({
} }
}; };
const hasProviders = providers.length > 0; const visibleProviders = providers.filter(
(p) => p.name.toLowerCase() !== 'transformers',
);
const hasProviders =
visibleProviders.filter((p) => p.chatModels.length > 0).length > 0;
return ( return (
<div className="w-[95vw] md:w-[80vw] lg:w-[65vw] mx-auto px-2 sm:px-4 md:px-6 flex flex-col space-y-6"> <div className="w-[95vw] md:w-[80vw] lg:w-[65vw] mx-auto px-2 sm:px-4 md:px-6 flex flex-col space-y-6">
@@ -80,10 +85,10 @@ const SetupConfig = ({
<div className="flex flex-row justify-between items-center mb-4 md:mb-6 pb-3 md:pb-4 border-b border-light-200 dark:border-dark-200"> <div className="flex flex-row justify-between items-center mb-4 md:mb-6 pb-3 md:pb-4 border-b border-light-200 dark:border-dark-200">
<div> <div>
<p className="text-xs sm:text-sm font-medium text-black dark:text-white"> <p className="text-xs sm:text-sm font-medium text-black dark:text-white">
Manage Providers Manage Connections
</p> </p>
<p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-0.5"> <p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-0.5">
Add and configure your model providers Add connections to access AI models
</p> </p>
</div> </div>
<AddProvider <AddProvider
@@ -99,14 +104,17 @@ const SetupConfig = ({
Loading providers... Loading providers...
</p> </p>
</div> </div>
) : providers.length === 0 ? ( ) : visibleProviders.length === 0 ? (
<div className="flex flex-col items-center justify-center py-8 md:py-12 text-center"> <div className="flex flex-col items-center justify-center py-8 md:py-12 text-center">
<p className="text-xs sm:text-sm font-medium text-black/70 dark:text-white/70"> <p className="text-xs sm:text-sm font-medium text-black/70 dark:text-white/70">
No providers configured No connections configured
</p>
<p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-1">
Click &quot;Add Connection&quot; above to get started
</p> </p>
</div> </div>
) : ( ) : (
providers.map((provider) => ( visibleProviders.map((provider) => (
<ModelProvider <ModelProvider
key={`provider-${provider.id}`} key={`provider-${provider.id}`}
fields={ fields={
@@ -124,8 +132,57 @@ const SetupConfig = ({
</motion.div> </motion.div>
)} )}
{setupState === 3 && (
<motion.div
initial={{ opacity: 0, y: 20 }}
animate={{
opacity: 1,
y: 0,
transition: { duration: 0.5, delay: 0.1 },
}}
className="w-full h-[calc(95vh-80px)] bg-light-primary dark:bg-dark-primary border border-light-200 dark:border-dark-200 rounded-xl shadow-sm flex flex-col overflow-hidden"
>
<div className="flex-1 overflow-y-auto px-3 sm:px-4 md:px-6 py-4 md:py-6">
<div className="flex flex-row justify-between items-center mb-4 md:mb-6 pb-3 md:pb-4 border-b border-light-200 dark:border-dark-200">
<div>
<p className="text-xs sm:text-sm font-medium text-black dark:text-white">
Select models
</p>
<p className="text-[10px] sm:text-xs text-black/50 dark:text-white/50 mt-0.5">
Select models which you wish to use.
</p>
</div>
</div>
<div className="space-y-3 md:space-y-4">
<ModelSelect providers={providers} type="chat" />
<ModelSelect providers={providers} type="embedding" />
</div>
</div>
</motion.div>
)}
<div className="flex flex-row items-center justify-between pt-2"> <div className="flex flex-row items-center justify-between pt-2">
<a></a>
{setupState === 2 && ( {setupState === 2 && (
<motion.button
initial={{ opacity: 0, x: 10 }}
animate={{
opacity: 1,
x: 0,
transition: { duration: 0.5 },
}}
onClick={() => {
setSetupState(3);
}}
disabled={!hasProviders || isLoading}
className="flex flex-row items-center gap-1.5 md:gap-2 px-3 md:px-5 py-2 md:py-2.5 rounded-lg bg-[#24A0ED] text-white hover:bg-[#1e8fd1] active:scale-95 transition-all duration-200 font-medium text-xs sm:text-sm disabled:bg-light-200 dark:disabled:bg-dark-200 disabled:text-black/40 dark:disabled:text-white/40 disabled:cursor-not-allowed disabled:active:scale-100"
>
<span>Next</span>
<ArrowRight className="w-4 h-4 md:w-[18px] md:h-[18px]" />
</motion.button>
)}
{setupState === 3 && (
<motion.button <motion.button
initial={{ opacity: 0, x: 10 }} initial={{ opacity: 0, x: 10 }}
animate={{ animate={{

View File

@@ -91,7 +91,7 @@ const WeatherWidget = () => {
setData({ setData({
temperature: data.temperature, temperature: data.temperature,
condition: data.condition, condition: data.condition,
location: location.city, location: 'Mars',
humidity: data.humidity, humidity: data.humidity,
windSpeed: data.windSpeed, windSpeed: data.windSpeed,
icon: data.icon, icon: data.icon,

View File

@@ -0,0 +1,54 @@
'use client';
import { Calculator, Equal } from 'lucide-react';
// Props for the calculation widget: the evaluated expression and its numeric result.
type CalculationWidgetProps = {
  expression: string;
  result: number;
};

/**
 * Read-only card that displays a calculator expression and its result.
 * The result is rendered with locale-aware digit grouping (toLocaleString).
 */
const Calculation = ({ expression, result }: CalculationWidgetProps) => {
  return (
    <div className="rounded-lg bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 overflow-hidden shadow-sm">
      {/* Header row: icon + title */}
      <div className="flex items-center gap-2 p-3 bg-light-100/50 dark:bg-dark-100/50 border-b border-light-200 dark:border-dark-200">
        <div className="rounded-full p-1.5 bg-light-100 dark:bg-dark-100">
          <Calculator className="w-4 h-4 text-black/70 dark:text-white/70" />
        </div>
        <span className="text-sm font-medium text-black dark:text-white">
          Calculation
        </span>
      </div>
      <div className="p-4 space-y-3">
        {/* Raw expression, monospace */}
        <div>
          <div className="flex items-center gap-1.5 mb-1.5">
            <span className="text-xs text-black/50 dark:text-white/50 font-medium">
              Expression
            </span>
          </div>
          <div className="bg-light-100 dark:bg-dark-100 rounded-md p-2.5 border border-light-200 dark:border-dark-200">
            <code className="text-sm text-black dark:text-white font-mono break-all">
              {expression}
            </code>
          </div>
        </div>
        {/* Emphasized result panel */}
        <div>
          <div className="flex items-center gap-1.5 mb-1.5">
            <Equal className="w-3.5 h-3.5 text-black/50 dark:text-white/50" />
            <span className="text-xs text-black/50 dark:text-white/50 font-medium">
              Result
            </span>
          </div>
          <div className="bg-gradient-to-br from-light-100 to-light-secondary dark:from-dark-100 dark:to-dark-secondary rounded-md p-4 border-2 border-light-200 dark:border-dark-200">
            <div className="text-4xl font-bold text-black dark:text-white font-mono tabular-nums">
              {result.toLocaleString()}
            </div>
          </div>
        </div>
      </div>
    </div>
  );
};

View File

@@ -0,0 +1,76 @@
import React from 'react';
import { Widget } from '../ChatWindow';
import Weather from './Weather';
import Calculation from './Calculation';
import Stock from './Stock';
/**
 * Renders a list of chat widgets by dispatching on `widget.widgetType`.
 * Unknown widget types fall through to a plain placeholder <div>.
 *
 * NOTE(review): the array index is used as the React key — safe only if the
 * widget list is append-only; confirm widgets are never reordered or removed.
 */
const Renderer = ({ widgets }: { widgets: Widget[] }) => {
  return widgets.map((widget, index) => {
    switch (widget.widgetType) {
      case 'weather':
        return (
          <Weather
            key={index}
            location={widget.params.location}
            current={widget.params.current}
            daily={widget.params.daily}
            timezone={widget.params.timezone}
          />
        );
      case 'calculation_result':
        return (
          <Calculation
            expression={widget.params.expression}
            result={widget.params.result}
            key={index}
          />
        );
      case 'stock':
        // Stock takes every quote field individually; forwarded verbatim
        // from widget.params (see StockWidgetProps in Stock.tsx).
        return (
          <Stock
            key={index}
            symbol={widget.params.symbol}
            shortName={widget.params.shortName}
            longName={widget.params.longName}
            exchange={widget.params.exchange}
            currency={widget.params.currency}
            marketState={widget.params.marketState}
            regularMarketPrice={widget.params.regularMarketPrice}
            regularMarketChange={widget.params.regularMarketChange}
            regularMarketChangePercent={
              widget.params.regularMarketChangePercent
            }
            regularMarketPreviousClose={
              widget.params.regularMarketPreviousClose
            }
            regularMarketOpen={widget.params.regularMarketOpen}
            regularMarketDayHigh={widget.params.regularMarketDayHigh}
            regularMarketDayLow={widget.params.regularMarketDayLow}
            regularMarketVolume={widget.params.regularMarketVolume}
            averageDailyVolume3Month={widget.params.averageDailyVolume3Month}
            marketCap={widget.params.marketCap}
            fiftyTwoWeekLow={widget.params.fiftyTwoWeekLow}
            fiftyTwoWeekHigh={widget.params.fiftyTwoWeekHigh}
            trailingPE={widget.params.trailingPE}
            forwardPE={widget.params.forwardPE}
            dividendYield={widget.params.dividendYield}
            earningsPerShare={widget.params.earningsPerShare}
            website={widget.params.website}
            postMarketPrice={widget.params.postMarketPrice}
            postMarketChange={widget.params.postMarketChange}
            postMarketChangePercent={widget.params.postMarketChangePercent}
            preMarketPrice={widget.params.preMarketPrice}
            preMarketChange={widget.params.preMarketChange}
            preMarketChangePercent={widget.params.preMarketChangePercent}
            chartData={widget.params.chartData}
            comparisonData={widget.params.comparisonData}
            error={widget.params.error}
          />
        );
      default:
        return <div key={index}>Unknown widget type: {widget.widgetType}</div>;
    }
  });
};
export default Renderer;

View File

@@ -0,0 +1,517 @@
'use client';
import { Clock, ArrowUpRight, ArrowDownRight, Minus } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import {
createChart,
ColorType,
LineStyle,
BaselineSeries,
LineSeries,
} from 'lightweight-charts';
/**
 * Props for the Stock widget. Field naming mirrors Yahoo-Finance-style quote
 * fields (regularMarketPrice, marketState, ...) — presumably produced by the
 * stock widget backend; confirm against the producing agent.
 */
type StockWidgetProps = {
  symbol: string;
  shortName: string;
  longName?: string;
  exchange?: string;
  currency?: string;
  // 'REGULAR' | 'PRE' | 'POST' are the states the widget distinguishes.
  marketState?: string;
  regularMarketPrice?: number;
  regularMarketChange?: number;
  regularMarketChangePercent?: number;
  regularMarketPreviousClose?: number;
  regularMarketOpen?: number;
  regularMarketDayHigh?: number;
  regularMarketDayLow?: number;
  regularMarketVolume?: number;
  averageDailyVolume3Month?: number;
  marketCap?: number;
  fiftyTwoWeekLow?: number;
  fiftyTwoWeekHigh?: number;
  trailingPE?: number;
  forwardPE?: number;
  // Fractional yield (e.g. 0.012); the widget multiplies by 100 for display.
  dividendYield?: number;
  earningsPerShare?: number;
  // Company site; used to derive a Clearbit logo URL.
  website?: string;
  postMarketPrice?: number;
  postMarketChange?: number;
  postMarketChangePercent?: number;
  preMarketPrice?: number;
  preMarketChange?: number;
  preMarketChangePercent?: number;
  // Price history per timeframe. Timestamps are epoch milliseconds
  // (the chart code divides by 1000 before feeding lightweight-charts).
  chartData?: {
    '1D'?: { timestamps: number[]; prices: number[] } | null;
    '5D'?: { timestamps: number[]; prices: number[] } | null;
    '1M'?: { timestamps: number[]; prices: number[] } | null;
    '3M'?: { timestamps: number[]; prices: number[] } | null;
    '6M'?: { timestamps: number[]; prices: number[] } | null;
    '1Y'?: { timestamps: number[]; prices: number[] } | null;
    MAX?: { timestamps: number[]; prices: number[] } | null;
  } | null;
  // Additional tickers rendered as comparison line series on the chart.
  comparisonData?: Array<{
    ticker: string;
    name: string;
    chartData: {
      '1D'?: { timestamps: number[]; prices: number[] } | null;
      '5D'?: { timestamps: number[]; prices: number[] } | null;
      '1M'?: { timestamps: number[]; prices: number[] } | null;
      '3M'?: { timestamps: number[]; prices: number[] } | null;
      '6M'?: { timestamps: number[]; prices: number[] } | null;
      '1Y'?: { timestamps: number[]; prices: number[] } | null;
      MAX?: { timestamps: number[]; prices: number[] } | null;
    };
  }> | null;
  // When set, the widget renders an error card instead of quote data.
  error?: string;
};
/**
 * Format a price-like number with fixed decimal places and locale-aware
 * digit grouping.
 *
 * @param num      Value to format; missing or non-finite values yield 'N/A'.
 * @param decimals Fraction digits to show (default 2).
 * @returns The formatted string, or 'N/A' when no sensible number exists.
 */
const formatNumber = (num: number | undefined, decimals = 2): string => {
  if (num === undefined || num === null) return 'N/A';
  // Guard against NaN/±Infinity, which toLocaleString would render
  // literally ("NaN", "∞") in the widget's stat cells.
  if (!Number.isFinite(num)) return 'N/A';
  return num.toLocaleString(undefined, {
    minimumFractionDigits: decimals,
    maximumFractionDigits: decimals,
  });
};
/**
 * Compactly format a dollar amount with a magnitude suffix:
 * 2.5e12 -> "$2.50T", 3.1e9 -> "$3.10B", 4.5e6 -> "$4.50M", 1234 -> "$1.23K".
 * Values below 1,000 are rendered with plain two-decimal dollars.
 * Missing values yield 'N/A'.
 */
const formatLargeNumber = (num: number | undefined): string => {
  if (num === undefined || num === null) return 'N/A';
  // Ordered largest-first so the first matching threshold wins.
  const scales: Array<[number, string]> = [
    [1e12, 'T'],
    [1e9, 'B'],
    [1e6, 'M'],
    [1e3, 'K'],
  ];
  for (const [threshold, suffix] of scales) {
    if (num >= threshold) {
      return `$${(num / threshold).toFixed(2)}${suffix}`;
    }
  }
  return `$${num.toFixed(2)}`;
};
/**
 * Stock quote widget: header with symbol/state badges, a lightweight-charts
 * price chart with selectable timeframes and optional comparison tickers,
 * and a grid of key stats. Renders an error card when `props.error` is set.
 */
const Stock = (props: StockWidgetProps) => {
  // Theme flag so chart colors can follow light/dark mode (lightweight-charts
  // needs explicit colors; it cannot read CSS classes).
  const [isDarkMode, setIsDarkMode] = useState(false);
  const [selectedTimeframe, setSelectedTimeframe] = useState<
    '1D' | '5D' | '1M' | '3M' | '6M' | '1Y' | 'MAX'
  >('1M');
  const chartContainerRef = useRef<HTMLDivElement>(null);

  // Watch <html>'s class list for the 'dark' class (Tailwind-style theme
  // toggling mutates it there) and mirror it into state.
  useEffect(() => {
    const checkDarkMode = () => {
      setIsDarkMode(document.documentElement.classList.contains('dark'));
    };
    checkDarkMode();
    const observer = new MutationObserver(checkDarkMode);
    observer.observe(document.documentElement, {
      attributes: true,
      attributeFilter: ['class'],
    });
    return () => observer.disconnect();
  }, []);

  // (Re)build the chart whenever data, timeframe, or theme changes. The chart
  // is recreated from scratch each run and destroyed in the cleanup.
  useEffect(() => {
    const currentChartData = props.chartData?.[selectedTimeframe];
    if (
      !chartContainerRef.current ||
      !currentChartData ||
      currentChartData.timestamps.length === 0
    ) {
      return;
    }

    const chart = createChart(chartContainerRef.current, {
      width: chartContainerRef.current.clientWidth,
      height: 280,
      layout: {
        background: { type: ColorType.Solid, color: 'transparent' },
        textColor: isDarkMode ? '#6b7280' : '#9ca3af',
        fontSize: 11,
        attributionLogo: false,
      },
      grid: {
        vertLines: {
          color: isDarkMode ? '#21262d' : '#e8edf1',
          style: LineStyle.Solid,
        },
        horzLines: {
          color: isDarkMode ? '#21262d' : '#e8edf1',
          style: LineStyle.Solid,
        },
      },
      crosshair: {
        vertLine: {
          color: isDarkMode ? '#30363d' : '#d0d7de',
          labelVisible: false,
        },
        horzLine: {
          color: isDarkMode ? '#30363d' : '#d0d7de',
          labelVisible: true,
        },
      },
      // Prices shown on the left scale only; right scale hidden.
      rightPriceScale: {
        borderVisible: false,
        visible: false,
      },
      leftPriceScale: {
        borderVisible: false,
        visible: true,
      },
      timeScale: {
        borderVisible: false,
        timeVisible: false,
      },
      // Static chart: no pan/zoom interaction.
      handleScroll: false,
      handleScale: false,
    });

    const prices = currentChartData.prices;
    // Baseline for the green/red fill: intraday (1D) compares against the
    // previous close when available; longer ranges compare against the first
    // price in the range.
    let baselinePrice: number;
    if (selectedTimeframe === '1D') {
      baselinePrice = props.regularMarketPreviousClose ?? prices[0];
    } else {
      baselinePrice = prices[0];
    }

    const baselineSeries = chart.addSeries(BaselineSeries);
    baselineSeries.applyOptions({
      baseValue: { type: 'price', price: baselinePrice },
      // Teal above the baseline, red below; fills fade toward the baseline.
      topLineColor: isDarkMode ? '#14b8a6' : '#0d9488',
      topFillColor1: isDarkMode
        ? 'rgba(20, 184, 166, 0.28)'
        : 'rgba(13, 148, 136, 0.24)',
      topFillColor2: isDarkMode
        ? 'rgba(20, 184, 166, 0.05)'
        : 'rgba(13, 148, 136, 0.05)',
      bottomLineColor: isDarkMode ? '#f87171' : '#dc2626',
      bottomFillColor1: isDarkMode
        ? 'rgba(248, 113, 113, 0.05)'
        : 'rgba(220, 38, 38, 0.05)',
      bottomFillColor2: isDarkMode
        ? 'rgba(248, 113, 113, 0.28)'
        : 'rgba(220, 38, 38, 0.24)',
      lineWidth: 2,
      crosshairMarkerVisible: true,
      crosshairMarkerRadius: 4,
      crosshairMarkerBorderColor: '',
      crosshairMarkerBackgroundColor: '',
    });

    // Timestamps arrive in milliseconds; lightweight-charts expects seconds.
    const data = currentChartData.timestamps.map((timestamp, index) => {
      const price = currentChartData.prices[index];
      return {
        time: (timestamp / 1000) as any,
        value: price,
      };
    });
    baselineSeries.setData(data);

    // Up to three comparison tickers as plain line series on the same scale.
    const comparisonColors = ['#8b5cf6', '#f59e0b', '#ec4899'];
    if (props.comparisonData && props.comparisonData.length > 0) {
      props.comparisonData.forEach((comp, index) => {
        const compChartData = comp.chartData[selectedTimeframe];
        if (compChartData && compChartData.prices.length > 0) {
          const compData = compChartData.timestamps.map((timestamp, i) => ({
            time: (timestamp / 1000) as any,
            value: compChartData.prices[i],
          }));
          const compSeries = chart.addSeries(LineSeries);
          compSeries.applyOptions({
            color: comparisonColors[index] || '#6b7280',
            lineWidth: 2,
            crosshairMarkerVisible: true,
            crosshairMarkerRadius: 4,
            priceScaleId: 'left',
          });
          compSeries.setData(compData);
        }
      });
    }

    chart.timeScale().fitContent();

    // Keep the chart width in sync with its container on window resize.
    const handleResize = () => {
      if (chartContainerRef.current) {
        chart.applyOptions({
          width: chartContainerRef.current.clientWidth,
        });
      }
    };
    window.addEventListener('resize', handleResize);

    return () => {
      window.removeEventListener('resize', handleResize);
      chart.remove();
    };
  }, [
    props.chartData,
    props.comparisonData,
    selectedTimeframe,
    isDarkMode,
    props.regularMarketPreviousClose,
  ]);

  // NOTE(review): isPositive is derived from regularMarketChange even when a
  // pre/post-market change is being displayed — confirm this is intended for
  // the arrow icon and the change color below.
  const isPositive = (props.regularMarketChange ?? 0) >= 0;
  const isMarketOpen = props.marketState === 'REGULAR';
  const isPreMarket = props.marketState === 'PRE';
  const isPostMarket = props.marketState === 'POST';

  // Outside regular hours, prefer the extended-hours quote; fall back to the
  // last regular-session values when the extended quote is missing.
  const displayPrice = isPostMarket
    ? props.postMarketPrice ?? props.regularMarketPrice
    : isPreMarket
      ? props.preMarketPrice ?? props.regularMarketPrice
      : props.regularMarketPrice;
  const displayChange = isPostMarket
    ? props.postMarketChange ?? props.regularMarketChange
    : isPreMarket
      ? props.preMarketChange ?? props.regularMarketChange
      : props.regularMarketChange;
  const displayChangePercent = isPostMarket
    ? props.postMarketChangePercent ?? props.regularMarketChangePercent
    : isPreMarket
      ? props.preMarketChangePercent ?? props.regularMarketChangePercent
      : props.regularMarketChangePercent;

  const changeColor = isPositive
    ? 'text-green-600 dark:text-green-400'
    : 'text-red-600 dark:text-red-400';

  // Error card short-circuits the whole widget.
  if (props.error) {
    return (
      <div className="rounded-lg bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-4">
        <p className="text-sm text-black dark:text-white">
          Error: {props.error}
        </p>
      </div>
    );
  }

  return (
    <div className="rounded-lg border border-light-200 dark:border-dark-200 overflow-hidden">
      <div className="p-4 space-y-4">
        {/* Header: logo, symbol, exchange, market-state badge, price/change */}
        <div className="flex items-start justify-between gap-4 pb-4 border-b border-light-200 dark:border-dark-200">
          <div className="flex-1">
            <div className="flex items-center gap-2 mb-1">
              {/* Company logo via Clearbit; hidden entirely if it 404s. */}
              {props.website && (
                <img
                  src={`https://logo.clearbit.com/${new URL(props.website).hostname}`}
                  alt={`${props.symbol} logo`}
                  className="w-8 h-8 rounded-lg"
                  onError={(e) => {
                    (e.target as HTMLImageElement).style.display = 'none';
                  }}
                />
              )}
              <h3 className="text-2xl font-bold text-black dark:text-white">
                {props.symbol}
              </h3>
              {props.exchange && (
                <span className="px-2 py-0.5 text-xs font-medium rounded bg-light-100 dark:bg-dark-100 text-black/60 dark:text-white/60">
                  {props.exchange}
                </span>
              )}
              {isMarketOpen && (
                <div className="flex items-center gap-1.5 px-2 py-0.5 rounded-full bg-green-100 dark:bg-green-950/40 border border-green-300 dark:border-green-800">
                  <div className="w-1.5 h-1.5 rounded-full bg-green-500 animate-pulse" />
                  <span className="text-xs font-medium text-green-700 dark:text-green-400">
                    Live
                  </span>
                </div>
              )}
              {isPreMarket && (
                <div className="flex items-center gap-1.5 px-2 py-0.5 rounded-full bg-blue-100 dark:bg-blue-950/40 border border-blue-300 dark:border-blue-800">
                  <Clock className="w-3 h-3 text-blue-600 dark:text-blue-400" />
                  <span className="text-xs font-medium text-blue-700 dark:text-blue-400">
                    Pre-Market
                  </span>
                </div>
              )}
              {isPostMarket && (
                <div className="flex items-center gap-1.5 px-2 py-0.5 rounded-full bg-orange-100 dark:bg-orange-950/40 border border-orange-300 dark:border-orange-800">
                  <Clock className="w-3 h-3 text-orange-600 dark:text-orange-400" />
                  <span className="text-xs font-medium text-orange-700 dark:text-orange-400">
                    After Hours
                  </span>
                </div>
              )}
            </div>
            <p className="text-sm text-black/60 dark:text-white/60">
              {props.longName || props.shortName}
            </p>
          </div>
          <div className="text-right">
            <div className="flex items-baseline gap-2 mb-1">
              <span className="text-3xl font-medium text-black dark:text-white">
                {props.currency === 'USD' ? '$' : ''}
                {formatNumber(displayPrice)}
              </span>
            </div>
            <div
              className={`flex items-center justify-end gap-1 ${changeColor}`}
            >
              {/* NOTE(review): isPositive is `>= 0`, so the Minus branch for
                  displayChange === 0 is unreachable — confirm intent. */}
              {isPositive ? (
                <ArrowUpRight className="w-4 h-4" />
              ) : displayChange === 0 ? (
                <Minus className="w-4 h-4" />
              ) : (
                <ArrowDownRight className="w-4 h-4" />
              )}
              <span className="text-lg font-normal">
                {displayChange !== undefined && displayChange >= 0 ? '+' : ''}
                {formatNumber(displayChange)}
              </span>
              <span className="text-sm font-normal">
                (
                {displayChangePercent !== undefined && displayChangePercent >= 0
                  ? '+'
                  : ''}
                {formatNumber(displayChangePercent)}%)
              </span>
            </div>
          </div>
        </div>
        {/* Chart card: timeframe buttons, comparison legend, chart, stats grid */}
        {props.chartData && (
          <div className="bg-light-secondary dark:bg-dark-secondary rounded-lg overflow-hidden">
            <div className="flex items-center justify-between p-3 border-b border-light-200 dark:border-dark-200">
              <div className="flex items-center gap-1">
                {(['1D', '5D', '1M', '3M', '6M', '1Y', 'MAX'] as const).map(
                  (timeframe) => (
                    <button
                      key={timeframe}
                      onClick={() => setSelectedTimeframe(timeframe)}
                      disabled={!props.chartData?.[timeframe]}
                      className={`px-3 py-1.5 text-xs font-medium rounded transition-colors ${
                        selectedTimeframe === timeframe
                          ? 'bg-black/10 dark:bg-white/10 text-black dark:text-white'
                          : 'text-black/50 dark:text-white/50 hover:text-black/80 dark:hover:text-white/80'
                      } disabled:opacity-30 disabled:cursor-not-allowed`}
                    >
                      {timeframe}
                    </button>
                  ),
                )}
              </div>
              {/* Legend mapping comparison tickers to their line colors
                  (colors duplicate comparisonColors in the chart effect). */}
              {props.comparisonData && props.comparisonData.length > 0 && (
                <div className="flex items-center gap-3 ml-auto">
                  <span className="text-xs text-black/50 dark:text-white/50">
                    {props.symbol}
                  </span>
                  {props.comparisonData.map((comp, index) => {
                    const colors = ['#8b5cf6', '#f59e0b', '#ec4899'];
                    return (
                      <div
                        key={comp.ticker}
                        className="flex items-center gap-1.5"
                      >
                        <div
                          className="w-2 h-2 rounded-full"
                          style={{ backgroundColor: colors[index] }}
                        />
                        <span className="text-xs text-black/70 dark:text-white/70">
                          {comp.ticker}
                        </span>
                      </div>
                    );
                  })}
                </div>
              )}
            </div>
            <div className="p-4">
              <div ref={chartContainerRef} />
            </div>
            {/* 3x3 key-stats grid */}
            <div className="grid grid-cols-3 border-t border-light-200 dark:border-dark-200">
              <div className="flex justify-between p-3 border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Prev Close
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  ${formatNumber(props.regularMarketPreviousClose)}
                </span>
              </div>
              <div className="flex justify-between p-3 border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  52W Range
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  ${formatNumber(props.fiftyTwoWeekLow, 2)}-$
                  {formatNumber(props.fiftyTwoWeekHigh, 2)}
                </span>
              </div>
              <div className="flex justify-between p-3">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Market Cap
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  {formatLargeNumber(props.marketCap)}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Open
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  ${formatNumber(props.regularMarketOpen)}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  P/E Ratio
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  {props.trailingPE ? formatNumber(props.trailingPE, 2) : 'N/A'}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Dividend Yield
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  {props.dividendYield
                    ? `${formatNumber(props.dividendYield * 100, 2)}%`
                    : 'N/A'}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Day Range
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  ${formatNumber(props.regularMarketDayLow, 2)}-$
                  {formatNumber(props.regularMarketDayHigh, 2)}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-r border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  Volume
                </span>
                <span className="text-xs text-black dark:text-white font-medium">
                  {formatLargeNumber(props.regularMarketVolume)}
                </span>
              </div>
              <div className="flex justify-between p-3 border-t border-light-200 dark:border-dark-200">
                <span className="text-xs text-black/50 dark:text-white/50">
                  EPS
                </span>
                {/* NOTE(review): when earningsPerShare is missing this renders
                    "$N/A" — the "$" is unconditional; confirm/fix upstream. */}
                <span className="text-xs text-black dark:text-white font-medium">
                  $
                  {props.earningsPerShare
                    ? formatNumber(props.earningsPerShare, 2)
                    : 'N/A'}
                </span>
              </div>
            </div>
          </div>
        )}
      </div>
    </div>
  );
};

View File

@@ -0,0 +1,408 @@
'use client';
import { Wind, Droplets, Gauge } from 'lucide-react';
import { useMemo, useEffect, useState } from 'react';
/**
 * Props for the Weather widget.
 * Field names follow Open-Meteo-style response keys (temperature_2m,
 * weather_code, is_day, ...) — presumably sourced from that API; confirm
 * against the producing backend.
 */
type WeatherWidgetProps = {
  location: string;
  // Current conditions snapshot.
  current: {
    time: string;
    temperature_2m: number;
    relative_humidity_2m: number;
    apparent_temperature: number;
    // Used to pick day vs night icons/gradients (see getWeatherInfo).
    is_day: number;
    precipitation: number;
    weather_code: number;
    wind_speed_10m: number;
    wind_direction_10m: number;
    wind_gusts_10m?: number;
  };
  // Daily forecast as parallel arrays, indexed by day.
  daily: {
    time: string[];
    weather_code: number[];
    temperature_2m_max: number[];
    temperature_2m_min: number[];
    precipitation_probability_max: number[];
  };
  timezone: string;
};
/**
 * Maps a weather code to its display assets: an icon file name, a short
 * human-readable description, and a CSS radial-gradient background.
 *
 * Icons come in `-day` / `-night` variants selected via `isDay`; every code
 * has separate gradient palettes for dark vs light mode, and the clear /
 * partly-cloudy codes additionally vary the gradient by day vs night.
 * Unknown codes fall back to code 0 ("Clear").
 *
 * NOTE(review): the numeric codes look like the WMO weather-interpretation
 * codes used by Open-Meteo — confirm the data source before extending.
 */
const getWeatherInfo = (code: number, isDay: boolean, isDarkMode: boolean) => {
  const dayNight = isDay ? 'day' : 'night';
  const weatherMap: Record<
    number,
    { icon: string; description: string; gradient: string }
  > = {
    // 0-1: clear skies (gradient varies by both theme and day/night)
    0: {
      icon: `clear-${dayNight}.svg`,
      description: 'Clear',
      gradient: isDarkMode
        ? isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #E8F1FA, #7A9DBF 35%, #4A7BA8 60%, #2F5A88)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #5A6A7E, #3E4E63 40%, #2A3544 65%, #1A2230)'
        : isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #DBEAFE 30%, #93C5FD 60%, #60A5FA)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #7B8694, #475569 45%, #334155 70%, #1E293B)',
    },
    1: {
      icon: `clear-${dayNight}.svg`,
      description: 'Mostly Clear',
      gradient: isDarkMode
        ? isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #E8F1FA, #7A9DBF 35%, #4A7BA8 60%, #2F5A88)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #5A6A7E, #3E4E63 40%, #2A3544 65%, #1A2230)'
        : isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #DBEAFE 30%, #93C5FD 60%, #60A5FA)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #7B8694, #475569 45%, #334155 70%, #1E293B)',
    },
    // 2-3: cloud cover
    2: {
      icon: `cloudy-1-${dayNight}.svg`,
      description: 'Partly Cloudy',
      gradient: isDarkMode
        ? isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4E1ED, #8BA3B8 35%, #617A93 60%, #426070)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #6B7583, #4A5563 40%, #3A4450 65%, #2A3340)'
        : isDay
          ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #E0F2FE 28%, #BFDBFE 58%, #93C5FD)'
          : 'radial-gradient(ellipse 150% 100% at 50% 100%, #8B99AB, #64748B 45%, #475569 70%, #334155)',
    },
    3: {
      icon: `cloudy-1-${dayNight}.svg`,
      description: 'Cloudy',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #B8C3CF, #758190 38%, #546270 65%, #3D4A58)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #F5F8FA, #CBD5E1 32%, #94A3B8 65%, #64748B)',
    },
    // 45/48: fog
    45: {
      icon: `fog-${dayNight}.svg`,
      description: 'Foggy',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #C5CDD8, #8892A0 38%, #697380 65%, #4F5A68)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #E2E8F0 30%, #CBD5E1 62%, #94A3B8)',
    },
    48: {
      icon: `fog-${dayNight}.svg`,
      description: 'Rime Fog',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #C5CDD8, #8892A0 38%, #697380 65%, #4F5A68)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #E2E8F0 30%, #CBD5E1 62%, #94A3B8)',
    },
    // 51-55: drizzle (increasing intensity)
    51: {
      icon: `rainy-1-${dayNight}.svg`,
      description: 'Light Drizzle',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #B8D4E5, #6FA4C5 35%, #4A85AC 60%, #356A8E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #E5FBFF, #A5F3FC 28%, #67E8F9 60%, #22D3EE)',
    },
    53: {
      icon: `rainy-1-${dayNight}.svg`,
      description: 'Drizzle',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #B8D4E5, #6FA4C5 35%, #4A85AC 60%, #356A8E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #E5FBFF, #A5F3FC 28%, #67E8F9 60%, #22D3EE)',
    },
    55: {
      icon: `rainy-2-${dayNight}.svg`,
      description: 'Heavy Drizzle',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #A5C5D8, #5E92B0 35%, #3F789D 60%, #2A5F82)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4F3FF, #7DD3FC 30%, #38BDF8 62%, #0EA5E9)',
    },
    // 61-65: rain (increasing intensity)
    61: {
      icon: `rainy-2-${dayNight}.svg`,
      description: 'Light Rain',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #A5C5D8, #5E92B0 35%, #3F789D 60%, #2A5F82)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4F3FF, #7DD3FC 30%, #38BDF8 62%, #0EA5E9)',
    },
    63: {
      icon: `rainy-2-${dayNight}.svg`,
      description: 'Rain',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #8DB3C8, #4D819F 38%, #326A87 65%, #215570)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #B8E8FF, #38BDF8 32%, #0EA5E9 65%, #0284C7)',
    },
    65: {
      icon: `rainy-3-${dayNight}.svg`,
      description: 'Heavy Rain',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #7BA3B8, #3D6F8A 38%, #295973 65%, #1A455D)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #9CD9F5, #0EA5E9 32%, #0284C7 65%, #0369A1)',
    },
    // 71-77: snow
    71: {
      icon: `snowy-1-${dayNight}.svg`,
      description: 'Light Snow',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #E5F0FA, #9BB5CE 32%, #7496B8 58%, #527A9E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #F0F9FF 25%, #E0F2FE 55%, #BAE6FD)',
    },
    73: {
      icon: `snowy-2-${dayNight}.svg`,
      description: 'Snow',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4E5F3, #85A1BD 35%, #6584A8 60%, #496A8E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FAFEFF, #E0F2FE 28%, #BAE6FD 60%, #7DD3FC)',
    },
    75: {
      icon: `snowy-3-${dayNight}.svg`,
      description: 'Heavy Snow',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #BDD8EB, #6F92AE 35%, #4F7593 60%, #365A78)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #F0FAFF, #BAE6FD 30%, #7DD3FC 62%, #38BDF8)',
    },
    77: {
      icon: `snowy-1-${dayNight}.svg`,
      description: 'Snow Grains',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #E5F0FA, #9BB5CE 32%, #7496B8 58%, #527A9E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FFFFFF, #F0F9FF 25%, #E0F2FE 55%, #BAE6FD)',
    },
    // 80-82: rain showers (increasing intensity)
    80: {
      icon: `rainy-2-${dayNight}.svg`,
      description: 'Light Showers',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #A5C5D8, #5E92B0 35%, #3F789D 60%, #2A5F82)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4F3FF, #7DD3FC 30%, #38BDF8 62%, #0EA5E9)',
    },
    81: {
      icon: `rainy-2-${dayNight}.svg`,
      description: 'Showers',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #8DB3C8, #4D819F 38%, #326A87 65%, #215570)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #B8E8FF, #38BDF8 32%, #0EA5E9 65%, #0284C7)',
    },
    82: {
      icon: `rainy-3-${dayNight}.svg`,
      description: 'Heavy Showers',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #7BA3B8, #3D6F8A 38%, #295973 65%, #1A455D)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #9CD9F5, #0EA5E9 32%, #0284C7 65%, #0369A1)',
    },
    // 85-86: snow showers
    85: {
      icon: `snowy-2-${dayNight}.svg`,
      description: 'Light Snow Showers',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #D4E5F3, #85A1BD 35%, #6584A8 60%, #496A8E)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #FAFEFF, #E0F2FE 28%, #BAE6FD 60%, #7DD3FC)',
    },
    86: {
      icon: `snowy-3-${dayNight}.svg`,
      description: 'Snow Showers',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #BDD8EB, #6F92AE 35%, #4F7593 60%, #365A78)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #F0FAFF, #BAE6FD 30%, #7DD3FC 62%, #38BDF8)',
    },
    // 95-99: thunderstorms (96/99 use a severe icon without day/night variants)
    95: {
      icon: `scattered-thunderstorms-${dayNight}.svg`,
      description: 'Thunderstorm',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #8A95A3, #5F6A7A 38%, #475260 65%, #2F3A48)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #C8D1DD, #94A3B8 32%, #64748B 65%, #475569)',
    },
    96: {
      icon: 'severe-thunderstorm.svg',
      description: 'Thunderstorm + Hail',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #7A8593, #515C6D 38%, #3A4552 65%, #242D3A)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #B0BBC8, #64748B 32%, #475569 65%, #334155)',
    },
    99: {
      icon: 'severe-thunderstorm.svg',
      description: 'Severe Thunderstorm',
      gradient: isDarkMode
        ? 'radial-gradient(ellipse 150% 100% at 50% 100%, #6A7583, #434E5D 40%, #2F3A47 68%, #1C2530)'
        : 'radial-gradient(ellipse 150% 100% at 50% 100%, #9BA8B8, #475569 35%, #334155 68%, #1E293B)',
    },
  };
  // Unknown codes fall back to the "Clear" entry.
  return weatherMap[code] || weatherMap[0];
};
/**
 * Weather widget card: current conditions, a 6-day forecast strip, and a
 * wind / humidity / feels-like footer, rendered over a weather- and
 * theme-dependent gradient background.
 *
 * Renders a plain "data unavailable" card when the payload is missing.
 *
 * NOTE(review): the "F C" label next to the main temperature reads like an
 * unfinished unit toggle (the footer hard-codes °C and the memo below
 * precomputes unused Fahrenheit values) — confirm the intended UI.
 * NOTE(review): the `timezone` prop is currently unused in this component.
 */
const Weather = ({
  location,
  current,
  daily,
  timezone,
}: WeatherWidgetProps) => {
  // Tracks whether the app-level `dark` class is present on <html> so the
  // gradient palette can follow the active theme.
  const [isDarkMode, setIsDarkMode] = useState(false);
  useEffect(() => {
    const checkDarkMode = () => {
      setIsDarkMode(document.documentElement.classList.contains('dark'));
    };
    checkDarkMode();
    // Re-check whenever <html>'s class attribute changes (theme toggles).
    const observer = new MutationObserver(checkDarkMode);
    observer.observe(document.documentElement, {
      attributes: true,
      attributeFilter: ['class'],
    });
    return () => observer.disconnect();
  }, []);
  // Icon / description / gradient for the current conditions.
  const weatherInfo = useMemo(
    () =>
      getWeatherInfo(
        current?.weather_code || 0,
        current?.is_day === 1,
        isDarkMode,
      ),
    [current?.weather_code, current?.is_day, isDarkMode],
  );
  // Next 6 days: index 0 of the daily arrays is today, so the slice starts
  // at 1 and `idx + 1` re-indexes into the full arrays. Forecast icons
  // always use the day variant.
  const forecast = useMemo(() => {
    if (!daily?.time || daily.time.length === 0) return [];
    return daily.time.slice(1, 7).map((time, idx) => {
      const date = new Date(time);
      const dayName = date.toLocaleDateString('en-US', { weekday: 'short' });
      const isDay = true;
      const weatherCode = daily.weather_code[idx + 1];
      const info = getWeatherInfo(weatherCode, isDay, isDarkMode);
      return {
        day: dayName,
        icon: info.icon,
        high: Math.round(daily.temperature_2m_max[idx + 1]),
        low: Math.round(daily.temperature_2m_min[idx + 1]),
        // Fahrenheit conversions — not referenced by the JSX below.
        highF: Math.round((daily.temperature_2m_max[idx + 1] * 9) / 5 + 32),
        lowF: Math.round((daily.temperature_2m_min[idx + 1] * 9) / 5 + 32),
        precipitation: daily.precipitation_probability_max[idx + 1] || 0,
      };
    });
  }, [daily, isDarkMode]);
  // Fallback card when the weather payload is absent or empty.
  if (!current || !daily || !daily.time || daily.time.length === 0) {
    return (
      <div className="relative overflow-hidden rounded-lg shadow-md bg-gray-200 dark:bg-gray-800">
        <div className="p-4 text-black dark:text-white">
          <p className="text-sm">Weather data unavailable for {location}</p>
        </div>
      </div>
    );
  }
  return (
    <div className="relative overflow-hidden rounded-lg shadow-md">
      <div
        className="absolute inset-0"
        style={{
          background: weatherInfo.gradient,
        }}
      />
      <div className="relative p-4 text-gray-800 dark:text-white">
        <div className="flex items-start justify-between mb-3">
          <div className="flex items-center gap-3">
            <img
              src={`/weather-ico/${weatherInfo.icon}`}
              alt={weatherInfo.description}
              className="w-16 h-16 drop-shadow-lg"
            />
            <div>
              <div className="flex items-baseline gap-1">
                <span className="text-4xl font-bold drop-shadow-md">
                  {Math.round(current.temperature_2m)}°
                </span>
                <span className="text-lg">F C</span>
              </div>
              <p className="text-sm font-medium drop-shadow mt-0.5">
                {weatherInfo.description}
              </p>
            </div>
          </div>
          <div className="text-right">
            <p className="text-xs font-medium opacity-90">
              {Math.round(daily.temperature_2m_max[0])}°{' '}
              {Math.round(daily.temperature_2m_min[0])}°
            </p>
          </div>
        </div>
        <div className="mb-3 pb-3 border-b border-gray-800/20 dark:border-white/20">
          <h3 className="text-base font-semibold drop-shadow-md">{location}</h3>
          <p className="text-xs text-gray-700 dark:text-white/80 drop-shadow mt-0.5">
            {new Date(current.time).toLocaleString('en-US', {
              weekday: 'short',
              hour: 'numeric',
              minute: '2-digit',
            })}
          </p>
        </div>
        <div className="grid grid-cols-6 gap-2 mb-3 pb-3 border-b border-gray-800/20 dark:border-white/20">
          {forecast.map((day, idx) => (
            <div
              key={idx}
              className="flex flex-col items-center bg-gray-800/10 dark:bg-white/10 backdrop-blur-sm rounded-md p-2"
            >
              <p className="text-xs font-medium mb-1">{day.day}</p>
              <img
                src={`/weather-ico/${day.icon}`}
                alt=""
                className="w-8 h-8 mb-1"
              />
              <div className="flex items-center gap-1 text-xs">
                <span className="font-semibold">{day.high}°</span>
                <span className="text-gray-600 dark:text-white/60">
                  {day.low}°
                </span>
              </div>
              {day.precipitation > 0 && (
                <div className="flex items-center gap-0.5 mt-1">
                  <Droplets className="w-3 h-3 text-gray-600 dark:text-white/70" />
                  <span className="text-[10px] text-gray-600 dark:text-white/70">
                    {day.precipitation}%
                  </span>
                </div>
              )}
            </div>
          ))}
        </div>
        <div className="grid grid-cols-3 gap-2 text-xs">
          <div className="flex items-center gap-2 bg-gray-800/10 dark:bg-white/10 backdrop-blur-sm rounded-md p-2">
            <Wind className="w-4 h-4 text-gray-700 dark:text-white/80 flex-shrink-0" />
            <div>
              <p className="text-[10px] text-gray-600 dark:text-white/70">
                Wind
              </p>
              <p className="font-semibold">
                {Math.round(current.wind_speed_10m)} km/h
              </p>
            </div>
          </div>
          <div className="flex items-center gap-2 bg-gray-800/10 dark:bg-white/10 backdrop-blur-sm rounded-md p-2">
            <Droplets className="w-4 h-4 text-gray-700 dark:text-white/80 flex-shrink-0" />
            <div>
              <p className="text-[10px] text-gray-600 dark:text-white/70">
                Humidity
              </p>
              <p className="font-semibold">
                {Math.round(current.relative_humidity_2m)}%
              </p>
            </div>
          </div>
          <div className="flex items-center gap-2 bg-gray-800/10 dark:bg-white/10 backdrop-blur-sm rounded-md p-2">
            <Gauge className="w-4 h-4 text-gray-700 dark:text-white/80 flex-shrink-0" />
            <div>
              <p className="text-[10px] text-gray-600 dark:text-white/70">
                Feels Like
              </p>
              <p className="font-semibold">
                {Math.round(current.apparent_temperature)}°C
              </p>
            </div>
          </div>
        </div>
      </div>
    </div>
  );
};

View File

@@ -3,7 +3,7 @@ import { Loader2, ChevronDown } from 'lucide-react';
import { SelectHTMLAttributes, forwardRef } from 'react'; import { SelectHTMLAttributes, forwardRef } from 'react';
interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> { interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
options: { value: string; label: string; disabled?: boolean }[]; options: { value: any; label: string; disabled?: boolean }[];
loading?: boolean; loading?: boolean;
} }
@@ -21,7 +21,7 @@ export const Select = forwardRef<HTMLSelectElement, SelectProps>(
ref={ref} ref={ref}
disabled={disabled || loading} disabled={disabled || loading}
className={cn( className={cn(
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm appearance-none w-full pr-10', 'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg appearance-none w-full pr-10 text-xs lg:text-sm',
className, className,
)} )}
> >

View File

@@ -1,6 +1,14 @@
import { Message } from '@/components/ChatWindow'; import { Message } from '@/components/ChatWindow';
export const getSuggestions = async (chatHistory: Message[]) => { export const getSuggestions = async (chatHistory: [string, string][]) => {
const chatTurns = chatHistory.map(([role, content]) => {
if (role === 'human') {
return { role: 'user', content };
} else {
return { role: 'assistant', content };
}
});
const chatModel = localStorage.getItem('chatModelKey'); const chatModel = localStorage.getItem('chatModelKey');
const chatModelProvider = localStorage.getItem('chatModelProviderId'); const chatModelProvider = localStorage.getItem('chatModelProviderId');
@@ -10,7 +18,7 @@ export const getSuggestions = async (chatHistory: Message[]) => {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
body: JSON.stringify({ body: JSON.stringify({
chatHistory: chatHistory, chatHistory: chatTurns,
chatModel: { chatModel: {
providerId: chatModelProvider, providerId: chatModelProvider,
key: chatModel, key: chatModel,

View File

@@ -0,0 +1,66 @@
/* I don't think these can be classified as agents, but I'll keep them here to keep the structure consistent. */
import { searchSearxng } from '@/lib/searxng';
import {
imageSearchFewShots,
imageSearchPrompt,
} from '@/lib/prompts/media/image';
import BaseLLM from '@/lib/models/base/llm';
import z from 'zod';
import { ChatTurnMessage } from '@/lib/types';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
// Input to the image search flow: prior conversation plus the follow-up query.
type ImageSearchChainInput = {
  chatHistory: ChatTurnMessage[];
  query: string;
};
// One SearxNG image hit, trimmed to the fields the UI needs.
type ImageSearchResult = {
  img_src: string;
  url: string;
  title: string;
};
/**
 * Turns the conversation + follow-up into a standalone image-search query via
 * the LLM, runs it against SearxNG's image engines, and returns at most 10
 * results that carry all required fields.
 */
const searchImages = async (
  input: ImageSearchChainInput,
  llm: BaseLLM<any>,
) => {
  const querySchema = z.object({
    query: z.string().describe('The image search query.'),
  });

  // Ask the model for a self-contained query derived from the chat context.
  const generated = await llm.generateObject<z.infer<typeof querySchema>>({
    messages: [
      { role: 'system', content: imageSearchPrompt },
      ...imageSearchFewShots,
      {
        role: 'user',
        content: `<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`,
      },
    ],
    schema: querySchema,
  });

  const searchRes = await searchSearxng(generated.query, {
    engines: ['bing images', 'google images'],
  });

  // Keep only complete results, stopping once 10 have been collected.
  const images: ImageSearchResult[] = [];
  for (const result of searchRes.results) {
    if (!result.img_src || !result.url || !result.title) continue;
    images.push({
      img_src: result.img_src,
      url: result.url,
      title: result.title,
    });
    if (images.length === 10) break;
  }
  return images;
};
export default searchImages;

View File

@@ -0,0 +1,66 @@
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
import { searchSearxng } from '@/lib/searxng';
import {
videoSearchFewShots,
videoSearchPrompt,
} from '@/lib/prompts/media/videos';
import { ChatTurnMessage } from '@/lib/types';
import BaseLLM from '@/lib/models/base/llm';
import z from 'zod';
// Input to the video search flow: prior conversation plus the follow-up query.
type VideoSearchChainInput = {
  chatHistory: ChatTurnMessage[];
  query: string;
};
// One SearxNG video hit; iframe_src lets the UI embed the player directly.
type VideoSearchResult = {
  img_src: string;
  url: string;
  title: string;
  iframe_src: string;
};
/**
 * Turns the conversation + follow-up into a standalone video-search query via
 * the LLM, runs it against SearxNG's YouTube engine, and returns at most 10
 * results that carry all required fields.
 */
const searchVideos = async (
  input: VideoSearchChainInput,
  llm: BaseLLM<any>,
) => {
  const querySchema = z.object({
    query: z.string().describe('The video search query.'),
  });

  // Ask the model for a self-contained query derived from the chat context.
  const generated = await llm.generateObject<z.infer<typeof querySchema>>({
    messages: [
      { role: 'system', content: videoSearchPrompt },
      ...videoSearchFewShots,
      {
        role: 'user',
        content: `<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`,
      },
    ],
    schema: querySchema,
  });

  const searchRes = await searchSearxng(generated.query, {
    engines: ['youtube'],
  });

  // Keep only complete results, stopping once 10 have been collected.
  const videos: VideoSearchResult[] = [];
  for (const result of searchRes.results) {
    if (
      !result.thumbnail ||
      !result.url ||
      !result.title ||
      !result.iframe_src
    ) {
      continue;
    }
    videos.push({
      img_src: result.thumbnail,
      url: result.url,
      title: result.title,
      iframe_src: result.iframe_src,
    });
    if (videos.length === 10) break;
  }
  return videos;
};
export default searchVideos;

View File

@@ -0,0 +1,73 @@
import z from 'zod';
import { ClassifierInput, ClassifierOutput } from '../types';
import { WidgetRegistry } from '../widgets';
import { IntentRegistry } from './intents';
import { getClassifierPrompt } from '@/lib/prompts/search/classifier';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
/**
 * Pre-search query classifier. Uses the LLM's structured-output mode to
 * decide whether search can be skipped, rewrite the query to be
 * self-contained, and pick the intents and widgets that should handle it.
 */
class Classifier {
  async classify(input: ClassifierInput): Promise<ClassifierOutput> {
    // Only offer intents whose `enabled` predicate passes for the
    // currently enabled sources.
    const availableIntents = IntentRegistry.getAvailableIntents({
      sources: input.enabledSources,
    });
    const availableWidgets = WidgetRegistry.getAll();
    // Schema the LLM's structured output must satisfy.
    // NOTE(review): z.enum needs a non-empty list and z.union needs at least
    // two schemas — this will throw if too few intents/widgets are
    // registered; confirm the registries always hold enough entries.
    const classificationSchema = z.object({
      skipSearch: z
        .boolean()
        .describe(
          'Set to true to SKIP search. Skip ONLY when: (1) widgets alone fully answer the query (e.g., weather, stocks, calculator), (2) simple greetings or writing tasks (NOT questions). Set to false for ANY question or information request.',
        ),
      standaloneFollowUp: z
        .string()
        .describe(
          "A self-contained, context-independent reformulation of the user's question. Must include all necessary context from chat history, replace pronouns with specific nouns, and be clear enough to answer without seeing the conversation. Keep the same complexity as the original question.",
        ),
      intents: z
        .array(z.enum(availableIntents.map((i) => i.name)))
        .describe(
          "The intent(s) that best describe how to fulfill the user's query. Can include multiple intents (e.g., ['web_search', 'widget_response'] for 'weather in NYC and recent news'). Always include at least one intent when applicable.",
        ),
      widgets: z
        .array(z.union(availableWidgets.map((w) => w.schema)))
        .describe(
          'Widgets that can display structured data to answer (fully or partially) the query. Include all applicable widgets regardless of skipSearch value.',
        ),
    });
    // Prompt embeds the per-intent and per-widget descriptions.
    const classifierPrompt = getClassifierPrompt({
      intentDesc: IntentRegistry.getDescriptions({
        sources: input.enabledSources,
      }),
      widgetDesc: WidgetRegistry.getDescriptions(),
    });
    const res = await input.llm.generateObject<
      z.infer<typeof classificationSchema>
    >({
      messages: [
        {
          role: 'system',
          content: classifierPrompt,
        },
        {
          role: 'user',
          content: `<conversation>${formatChatHistoryAsString(input.chatHistory)}</conversation>\n\n<query>${input.query}</query>`,
        },
      ],
      schema: classificationSchema,
    });
    // Reshape raw widget configs into { type, params } pairs; the whole
    // config (still including `type`) is kept as `params`.
    res.widgets = res.widgets.map((widgetConfig) => {
      return {
        type: widgetConfig.type,
        params: widgetConfig,
      };
    });
    return res as ClassifierOutput;
  }
}
export default Classifier;

View File

@@ -0,0 +1,52 @@
import { Intent } from '../../types';
// Prompt-facing description of the academic_search intent; injected verbatim
// into the classifier prompt via IntentRegistry.getDescriptions().
const description = `Use this intent to search for scholarly articles, research papers, scientific studies, and academic resources when the user explicitly requests credible, peer-reviewed, or authoritative information from academic sources.
#### When to use:
1. User explicitly mentions academic keywords: research papers, scientific studies, scholarly articles, peer-reviewed, journal articles.
2. User asks for scientific evidence or academic research on a topic.
3. User needs authoritative, citation-worthy sources for research or academic purposes.
#### When NOT to use:
1. General questions that don't specifically request academic sources - use 'web_search' instead.
2. User just wants general information without specifying academic sources.
3. Casual queries about facts or current events.
#### Example use cases:
1. "Find scientific papers on climate change effects"
- User explicitly wants scientific papers.
- Intent: ['academic_search'] with skipSearch: false
2. "What does the research say about meditation benefits?"
- User is asking for research-based information.
- Intent: ['academic_search', 'web_search'] with skipSearch: false
3. "Show me peer-reviewed articles on CRISPR technology"
- User specifically wants peer-reviewed academic content.
- Intent: ['academic_search'] with skipSearch: false
4. "I need scholarly sources about renewable energy for my thesis"
- User explicitly needs scholarly/academic sources.
- Intent: ['academic_search'] with skipSearch: false
5. "Explain quantum computing" (WRONG to use academic_search alone)
- This is a general question, not specifically requesting academic papers.
- Correct intent: ['web_search'] with skipSearch: false
- Could combine: ['web_search', 'academic_search'] if you want both general and academic sources
6. "What's the latest study on sleep patterns?"
- User mentions "study" - combine academic and web search for comprehensive results.
- Intent: ['academic_search', 'web_search'] with skipSearch: false
**IMPORTANT**: This intent can be combined with 'web_search' to provide both academic papers and general web information. Always set skipSearch to false when using this intent.
**NOTE**: This intent is only available if academic search sources are enabled in the configuration.`;
// Only offered when the 'academic' source is enabled in the config.
const academicSearchIntent: Intent = {
  name: 'academic_search',
  description,
  requiresSearch: true,
  enabled: (config) => config.sources.includes('academic'),
};
export default academicSearchIntent;

View File

@@ -0,0 +1,55 @@
import { Intent } from '../../types';
// Prompt-facing description of the discussions_search intent; injected
// verbatim into the classifier prompt via IntentRegistry.getDescriptions().
const description = `Use this intent to search through discussion forums, community boards, and social platforms (Reddit, forums, etc.) when the user explicitly wants opinions, personal experiences, community discussions, or crowd-sourced information.
#### When to use:
1. User explicitly mentions: Reddit, forums, discussion boards, community opinions, "what do people think", "user experiences".
2. User is asking for opinions, reviews, or personal experiences about a product, service, or topic.
3. User wants to know what communities or people are saying about something.
#### When NOT to use:
1. General questions that don't specifically ask for opinions or discussions - use 'web_search' instead.
2. User wants factual information or official sources.
3. Casual queries about facts, news, or current events without requesting community input.
#### Example use cases:
1. "What do people on Reddit think about the new iPhone?"
- User explicitly wants Reddit/community opinions.
- Intent: ['discussions_search'] with skipSearch: false
2. "User experiences with Tesla Model 3"
- User is asking for personal experiences from users.
- Intent: ['discussions_search'] with skipSearch: false
3. "Best gaming laptop according to forums"
- User wants forum/community recommendations.
- Intent: ['discussions_search'] with skipSearch: false
4. "What are people saying about the new AI regulations?"
- User wants community discussions/opinions.
- Intent: ['discussions_search', 'web_search'] with skipSearch: false
5. "Reviews and user opinions on the Framework laptop"
- Combines user opinions with general reviews.
- Intent: ['discussions_search', 'web_search'] with skipSearch: false
6. "What's the price of iPhone 15?" (WRONG to use discussions_search)
- This is a factual question, not asking for opinions.
- Correct intent: ['web_search'] with skipSearch: false
7. "Explain how OAuth works" (WRONG to use discussions_search)
- This is asking for information, not community opinions.
- Correct intent: ['web_search'] with skipSearch: false
**IMPORTANT**: This intent can be combined with 'web_search' to provide both community discussions and official/factual information. Always set skipSearch to false when using this intent.
**NOTE**: This intent is only available if discussion search sources are enabled in the configuration.`;
// Only offered when the 'discussions' source is enabled in the config.
const discussionSearchIntent: Intent = {
  name: 'discussions_search',
  description,
  requiresSearch: true,
  enabled: (config) => config.sources.includes('discussions'),
};
export default discussionSearchIntent;

View File

@@ -0,0 +1,16 @@
import academicSearchIntent from './academicSearch';
import discussionSearchIntent from './discussionSearch';
import privateSearchIntent from './privateSearch';
import IntentRegistry from './registry';
import webSearchIntent from './webSearch';
import widgetResponseIntent from './widgetResponse';
import writingTaskIntent from './writingTask';
// Register every built-in intent with the shared registry at module load.
// Registration order matters: the registry's Map preserves insertion order,
// which fixes the order intents appear in getDescriptions() (and thus in
// the classifier prompt).
IntentRegistry.register(webSearchIntent);
IntentRegistry.register(academicSearchIntent);
IntentRegistry.register(discussionSearchIntent);
IntentRegistry.register(widgetResponseIntent);
IntentRegistry.register(writingTaskIntent);
IntentRegistry.register(privateSearchIntent);
export { IntentRegistry };

View File

@@ -0,0 +1,47 @@
import { Intent } from '../../types';
// Prompt-facing description of the private_search intent; injected verbatim
// into the classifier prompt via IntentRegistry.getDescriptions().
const description = `Use this intent to search through the user's uploaded documents or provided web page links when they ask questions about their personal files or specific URLs.
#### When to use:
1. User explicitly asks about uploaded documents ("tell me about the document I uploaded", "summarize this file").
2. User provides specific URLs/links and asks questions about them ("tell me about example.com", "what's on this page: url.com").
3. User references "my documents", "the file I shared", "this link" when files or URLs are available.
#### When NOT to use:
1. User asks generic questions like "summarize" without providing context or files (later the system will ask what they want summarized).
2. No files have been uploaded and no URLs provided - use web_search or other intents instead.
3. User is asking general questions unrelated to their uploaded content.
#### Example use cases:
1. "Tell me about the PDF I uploaded"
- Files are uploaded, user wants information from them.
- Intent: ['private_search'] with skipSearch: false
2. "What's the main point from example.com?"
- User provided a specific URL to analyze.
- Intent: ['private_search'] with skipSearch: false
3. "Summarize the research paper I shared"
- User references a shared document.
- Intent: ['private_search'] with skipSearch: false
4. "Summarize" (WRONG to use private_search if no files/URLs)
- No context provided, no files uploaded.
- Correct: Skip this intent, let the answer agent ask what to summarize
5. "What does my document say about climate change and also search the web for recent updates?"
- Combine private document search with web search.
- Intent: ['private_search', 'web_search'] with skipSearch: false
**IMPORTANT**: Only use this intent if files are actually uploaded or URLs are explicitly provided in the query. Check the context for uploaded files before selecting this intent. Always set skipSearch to false when using this intent.
**NOTE**: This intent can be combined with other search intents when the user wants both personal document information and external sources.`;
// Always offered (enabled unconditionally); the description above instructs
// the model to select it only when files or URLs are actually present.
const privateSearchIntent: Intent = {
  name: 'private_search',
  description,
  enabled: (config) => true,
  requiresSearch: true,
};
export default privateSearchIntent;

View File

@@ -0,0 +1,31 @@
import { Intent, SearchAgentConfig, SearchSources } from '../../types';
/**
 * Static registry of search intents. Intents self-register at module load
 * (see ./index) and are looked up by name or filtered by the enabled
 * search sources.
 */
class IntentRegistry {
  // name -> intent; Map preserves insertion (registration) order, which
  // determines the order of getDescriptions() output.
  private static intents = new Map<string, Intent>();

  /** Registers an intent (replacing any prior intent with the same name). */
  static register(intent: Intent) {
    this.intents.set(intent.name, intent);
  }

  /** Returns the intent registered under `name`, if any. */
  static get(name: string): Intent | undefined {
    return this.intents.get(name);
  }

  /**
   * Returns all intents whose `enabled` predicate accepts the given source
   * configuration, in registration order.
   */
  static getAvailableIntents(config: { sources: SearchSources[] }): Intent[] {
    // Materialize the map's values before filtering: the original chained
    // `.values().filter(...)`, but Iterator.prototype.filter is an ES2025
    // iterator helper and is unavailable on older runtimes / compile targets.
    return Array.from(this.intents.values()).filter((intent) =>
      intent.enabled(config),
    );
  }

  /**
   * Builds the block of intent descriptions (one delimited section per
   * enabled intent) that gets injected into the classifier prompt.
   */
  static getDescriptions(config: { sources: SearchSources[] }): string {
    const availableintents = this.getAvailableIntents(config);
    return availableintents
      .map(
        (intent) => `-------\n\n###${intent.name}: ${intent.description}\n\n`,
      )
      .join('\n\n');
  }
}
export default IntentRegistry;

View File

@@ -0,0 +1,31 @@
import { Intent } from '../../types';
// Prompt-facing description of the web_search intent (the general-purpose
// fallback search); injected verbatim into the classifier prompt via
// IntentRegistry.getDescriptions().
const description = `
Use this intent to find current information from the web when the user is asking a question or needs up-to-date information that cannot be provided by widgets or other intents.
#### When to use:
1. Simple user questions about current events, news, weather, or general knowledge that require the latest information and there is no specific better intent to use.
2. When the user explicitly requests information from the web or indicates they want the most recent data (and still there's no other better intent).
3. When no widgets can fully satisfy the user's request for information nor any other specialized search intent applies.
#### Examples use cases:
1. "What is the weather in San Francisco today? ALso tell me some popular events happening there this weekend."
- In this case, the weather widget can provide the current weather, but for popular events, a web search is needed. So the intent should include a 'web_search' & a 'widget_response'.
2. "Who won the Oscar for Best Picture in 2024?"
- This is a straightforward question that requires current information from the web.
3. "Give me the latest news on AI regulations."
- The user is asking for up-to-date news, which necessitates a web search.
**IMPORTANT**: If this intent is given then skip search should be false.
`;
// Only offered when the 'web' source is enabled in the config.
const webSearchIntent: Intent = {
  name: 'web_search',
  description: description,
  requiresSearch: true,
  enabled: (config) => config.sources.includes('web'),
};
export default webSearchIntent;

View File

@@ -0,0 +1,47 @@
import { Intent } from '../../types';
const description = `Use this intent when the user's query can be fully or partially answered using specialized widgets that provide structured, real-time data (weather, stocks, calculations, and more).
#### When to use:
1. The user is asking for specific information that a widget can provide (current weather, stock prices, mathematical calculations, unit conversions, etc.).
2. A widget can completely answer the query without needing additional web search (use this intent alone and set skipSearch to true).
3. A widget can provide part of the answer, but additional information from web search or other sources is needed (combine with other intents like 'web_search' and set skipSearch to false).
#### Example use cases:
Note: These are just examples - there are several other widgets available for use depending on the user's query.
1. "What is the weather in New York?"
- The weather widget can fully answer this query.
- Intent: ['widget_response'] with skipSearch: true
- Widget: [{ type: 'weather', location: 'New York', lat: 0, lon: 0 }]
2. "What's the weather in San Francisco today? Also tell me some popular events happening there this weekend."
- Weather widget provides current conditions, but events require web search.
- Intent: ['web_search', 'widget_response'] with skipSearch: false
- Widget: [{ type: 'weather', location: 'San Francisco', lat: 0, lon: 0 }]
3. "Calculate 25% of 480"
- The calculator widget can fully answer this.
- Intent: ['widget_response'] with skipSearch: true
- Widget: [{ type: 'calculator', expression: '25% of 480' }]
4. "AAPL stock price and recent Apple news"
- Stock widget provides price, but news requires web search.
- Intent: ['web_search', 'widget_response'] with skipSearch: false
- Widget: [{ type: 'stock', symbol: 'AAPL' }]
5. "What's Tesla's stock doing and how does it compare to competitors?"
- Stock widget provides Tesla's price, but comparison analysis requires web search.
- Intent: ['web_search', 'widget_response'] with skipSearch: false
- Widget: [{ type: 'stock', symbol: 'TSLA' }]
**IMPORTANT**: Set skipSearch to true ONLY if the widget(s) can completely answer the user's query without any additional information. If the user asks for anything beyond what the widget provides (context, explanations, comparisons, related information), combine this intent with 'web_search' and set skipSearch to false.`;
/**
 * Intent emitted when one or more UI widgets (weather, stock, calculator, …)
 * can supply part or all of the answer. The classifier pairs it with
 * `web_search` (skipSearch: false) whenever the widgets alone are not enough.
 */
const widgetResponseIntent: Intent = {
  name: 'widget_response',
  description,
  requiresSearch: false,
  // Always available: widgets do not depend on which search sources are
  // enabled. (The previous `(config) => true` left the parameter unused.)
  enabled: () => true,
};

export default widgetResponseIntent;

View File

@@ -0,0 +1,53 @@
import { Intent } from '../../types';
// Prompt fragment shown to the classifier LLM describing when the
// writing_task intent applies. Deliberately strict: anything that needs
// facts must fall through to web_search.
const description = `Use this intent for simple writing or greeting tasks that do not require any external information or facts. This is ONLY for greetings and straightforward creative writing that needs no factual verification.
#### When to use:
1. User greetings or simple social interactions (hello, hi, thanks, goodbye).
2. Creative writing tasks that require NO factual information (poems, birthday messages, thank you notes).
3. Simple drafting tasks where the user provides all necessary information.
#### When NOT to use:
1. ANY question that starts with "what", "how", "why", "when", "where", "who" - these need web_search.
2. Requests for explanations, definitions, or information about anything.
3. Code-related questions or technical help - these need web_search.
4. Writing tasks that require facts, data, or current information.
5. When you're uncertain about any information needed - default to web_search.
#### Example use cases:
1. "Hello" or "Hi there"
- Simple greeting, no information needed.
- Intent: ['writing_task'] with skipSearch: true
2. "Write me a birthday message for my friend"
- Creative writing, no facts needed.
- Intent: ['writing_task'] with skipSearch: true
3. "Draft a thank you email for a job interview"
- Simple writing task, no external information required.
- Intent: ['writing_task'] with skipSearch: true
4. "What is React?" (WRONG to use writing_task)
- This is a QUESTION asking for information.
- Correct intent: ['web_search'] with skipSearch: false
5. "How do I fix this error in Python?" (WRONG to use writing_task)
- This is asking for technical help.
- Correct intent: ['web_search'] with skipSearch: false
6. "Write an email about the latest AI developments" (WRONG to use writing_task alone)
- This requires current information about AI developments.
- Correct intent: ['web_search'] with skipSearch: false
**CRITICAL RULE**: When in doubt, DO NOT use this intent. Default to web_search. This intent should be rare - only use it for greetings and purely creative writing tasks that need absolutely no facts or information.
**IMPORTANT**: If this intent is used alone, skipSearch should be true. Never combine this with other search intents unless you're absolutely certain both are needed.`;

/**
 * Intent for greetings and purely creative writing that needs no external
 * facts — the only intent that should normally appear with skipSearch: true
 * on its own.
 */
const writingTaskIntent: Intent = {
  name: 'writing_task',
  description,
  requiresSearch: false,
  // Always available: writing does not depend on which search sources are
  // enabled. (The previous `(config) => true` left the parameter unused.)
  enabled: () => true,
};

export default writingTaskIntent;

View File

@@ -0,0 +1,105 @@
import { ResearcherOutput, SearchAgentInput } from './types';
import SessionManager from '@/lib/session';
import Classifier from './classifier';
import { WidgetRegistry } from './widgets';
import Researcher from './researcher';
import { getWriterPrompt } from '@/lib/prompts/search/writer';
import fs from 'fs';
/**
 * Top-level orchestrator for one search turn: classifies the query, runs any
 * requested widgets and the research loop concurrently, then streams a final
 * written answer composed from both result sets over the session.
 */
class SearchAgent {
  /**
   * Executes the full pipeline for a single follow-up question.
   *
   * @param session - Streaming session used to emit blocks/events to the UI.
   * @param input - Chat history, the follow-up question, and model/search
   *   configuration.
   * @returns The complete answer text that was streamed. (Previously this
   *   was accumulated into a local and discarded; returning it lets callers
   *   persist the answer without changing existing call sites.)
   */
  async searchAsync(
    session: SessionManager,
    input: SearchAgentInput,
  ): Promise<string> {
    const classifier = new Classifier();
    const classification = await classifier.classify({
      chatHistory: input.chatHistory,
      enabledSources: input.config.sources,
      query: input.followUp,
      llm: input.config.llm,
    });

    // Widgets run concurrently with research; each output is emitted to the
    // UI as its own 'widget' block once the batch resolves.
    const widgetPromise = WidgetRegistry.executeAll(classification.widgets, {
      llm: input.config.llm,
      embedding: input.config.embedding,
      session: session,
    }).then((widgetOutputs) => {
      widgetOutputs.forEach((o) => {
        session.emitBlock({
          id: crypto.randomUUID(),
          type: 'widget',
          data: {
            widgetType: o.type,
            params: o.data,
          },
        });
      });
      return widgetOutputs;
    });

    // Research is skipped entirely when the classifier decided the widgets
    // alone can answer the query.
    let searchPromise: Promise<ResearcherOutput> | null = null;
    if (!classification.skipSearch) {
      const researcher = new Researcher();
      searchPromise = researcher.research(session, {
        chatHistory: input.chatHistory,
        followUp: input.followUp,
        classification: classification,
        config: input.config,
      });
    }

    const [widgetOutputs, searchResults] = await Promise.all([
      widgetPromise,
      searchPromise,
    ]);

    session.emit('data', {
      type: 'researchComplete',
    });

    // Flatten search findings into a citable "Title: content" context string.
    const finalContext =
      searchResults?.findings
        .filter((f) => f.type === 'search_results')
        .flatMap((f) => f.results)
        .map((f) => `${f.metadata.title}: ${f.content}`)
        .join('\n') || '';

    const widgetContext = widgetOutputs
      .map((o) => {
        return `${o.type}: ${JSON.stringify(o.data)}`;
      })
      .join('\n-------------\n');

    // NOTE(review): the widget note below contains typos ("showed", "souce");
    // kept byte-identical here — fix alongside prompt tuning upstream.
    const finalContextWithWidgets = `<search_results note="These are the search results and you can cite these">${finalContext}</search_results>\n<widgets_result noteForAssistant="Its output is already showed to the user, you can use this information to answer the query but do not CITE this as a souce">${widgetContext}</widgets_result>`;

    const writerPrompt = getWriterPrompt(finalContextWithWidgets);

    const answerStream = input.config.llm.streamText({
      messages: [
        {
          role: 'system',
          content: writerPrompt,
        },
        ...input.chatHistory,
        {
          role: 'user',
          content: input.followUp,
        },
      ],
    });

    // Stream each chunk to the client while accumulating the full answer.
    let accumulatedText = '';
    for await (const chunk of answerStream) {
      accumulatedText += chunk.contentChunk;
      session.emit('data', {
        type: 'response',
        data: chunk.contentChunk,
      });
    }

    session.emit('end', {});

    return accumulatedText;
  }
}

export default SearchAgent;

View File

@@ -0,0 +1,19 @@
import z from 'zod';
import { ResearchAction } from '../../types';
// Matches exactly { type: 'done' } — the action carries no parameters.
const doneSchema = z.object({
  type: z.literal('done'),
});

/**
 * Terminal action for the research loop. When the LLM selects it, the
 * researcher stops iterating; `execute` itself is a no-op marker.
 * Typed with `typeof doneSchema` (was `ResearchAction<any>`) for
 * consistency with webSearchAction's typed-schema pattern.
 */
const doneAction: ResearchAction<typeof doneSchema> = {
  name: 'done',
  description:
    "Indicates that the research process is complete and no further actions are needed. Use this action when you have gathered sufficient information to answer the user's query.",
  // Always offered, regardless of classification.
  enabled: (_) => true,
  schema: doneSchema,
  execute: async (params, additionalConfig) => {
    return {
      type: 'done',
    };
  },
};

export default doneAction;

View File

@@ -0,0 +1,8 @@
import doneAction from './done';
import ActionRegistry from './registry';
import webSearchAction from './webSearch';
// Populate the registry at module load time; importing this file is what
// makes the actions visible to the researcher loop.
ActionRegistry.register(webSearchAction);
ActionRegistry.register(doneAction);

export { ActionRegistry };

View File

@@ -0,0 +1,73 @@
import {
ActionConfig,
ActionOutput,
AdditionalConfig,
ClassifierOutput,
ResearchAction,
} from '../../types';
/**
 * Static registry of research actions available to the researcher loop.
 * Actions self-register at module load (see ./index.ts).
 */
class ActionRegistry {
  // Keyed by action name; registering the same name again replaces it.
  private static actions: Map<string, ResearchAction> = new Map();

  /** Adds (or replaces) an action under its declared name. */
  static register(action: ResearchAction<any>) {
    this.actions.set(action.name, action);
  }

  /** Looks up a single action by name. */
  static get(name: string): ResearchAction | undefined {
    return this.actions.get(name);
  }

  /**
   * Returns the actions whose `enabled` predicate accepts the current
   * classification.
   */
  static getAvailableActions(config: {
    classification: ClassifierOutput;
  }): ResearchAction[] {
    // Materialize the map values before filtering: `Iterator.prototype.filter`
    // is an ES2025 iterator helper and unavailable on older runtimes,
    // whereas Array.prototype.filter always works.
    return Array.from(this.actions.values()).filter((action) =>
      action.enabled(config),
    );
  }

  /** Renders the enabled actions as a prompt-ready description list. */
  static getAvailableActionsDescriptions(config: {
    classification: ClassifierOutput;
  }): string {
    const availableActions = this.getAvailableActions(config);
    return availableActions
      .map((action) => `------------\n##${action.name}\n${action.description}`)
      .join('\n\n');
  }

  /**
   * Executes a single action by name.
   * @throws Error when no action with that name is registered.
   */
  static async execute(
    name: string,
    params: any,
    additionalConfig: AdditionalConfig,
  ) {
    const action = this.actions.get(name);
    if (!action) {
      throw new Error(`Action with name ${name} not found`);
    }
    return action.execute(params, additionalConfig);
  }

  /**
   * Executes several actions concurrently. Results are returned in the same
   * order as `actions` — the previous push-on-completion implementation
   * ordered them by network completion time, which was nondeterministic.
   */
  static async executeAll(
    actions: ActionConfig[],
    additionalConfig: AdditionalConfig,
  ): Promise<ActionOutput[]> {
    return Promise.all(
      actions.map((actionConfig) =>
        this.execute(actionConfig.type, actionConfig.params, additionalConfig),
      ),
    );
  }
}

export default ActionRegistry;

View File

@@ -0,0 +1,56 @@
import z from 'zod';
import { ResearchAction } from '../../types';
import { searchSearxng } from '@/lib/searxng';
import { Chunk } from '@/lib/types';
// Input schema for the web_search action. The "maximum of 3 queries" rule
// lives only in the prompt text below — NOTE(review): consider enforcing it
// here with .max(3); confirm against classifier output first.
const actionSchema = z.object({
  type: z.literal('web_search'),
  queries: z
    .array(z.string())
    .describe('An array of search queries to perform web searches for.'),
});
// Prompt fragment describing the web_search action to the researcher LLM.
// Fix: rule 2 previously said "quality mode" where it described the balanced
// mode's two-search budget, duplicating rule 3 and leaving balanced mode
// undocumented.
const actionDescription = `
You have to use this action aggressively to find relevant information from the web to answer user queries. You can combine this action with other actions to gather comprehensive data. Always ensure that you provide accurate and up-to-date information by leveraging web search results.
When this action is present, you must use it to obtain current information from the web.
### How to use:
1. For speed search mode, you can use this action once. Make sure to cover all aspects of the user's query in that single search.
2. If you're on balanced mode, you'll get to use this action up to two times. Use the first search to gather general information, and the second search to fill in any gaps or get more specific details based on the initial findings.
3. If you're set on quality mode, then you will get to use this action multiple times to gather more information. Use your judgment to decide when additional searches are necessary to provide a thorough and accurate response.
Input: An array of search queries. Make sure the queries are relevant to the user's request and cover different aspects if necessary. You can include a maximum of 3 queries. Make sure the queries are SEO friendly and not sentences rather keywords which can be used to search a search engine like Google, Bing, etc.
`;
/**
 * Research action that fans out up to three queries to SearXNG in parallel
 * and returns the merged results as content chunks.
 */
const webSearchAction: ResearchAction<typeof actionSchema> = {
  name: 'web_search',
  description: actionDescription,
  schema: actionSchema,
  // Only offered when the classifier tagged the turn as needing web search.
  enabled: (config) => config.classification.intents.includes('web_search'),
  execute: async (input, _) => {
    // Collect one result list per query, then concatenate in query order.
    // (The previous version pushed into a shared array from inside
    // Promise.all, so result order depended on network timing.)
    const search = async (q: string): Promise<Chunk[]> => {
      const res = await searchSearxng(q);
      return res.results.map((r) => ({
        // Some engines return no snippet; fall back to the title.
        content: r.content || r.title,
        metadata: {
          title: r.title,
          url: r.url,
        },
      }));
    };
    const perQuery = await Promise.all(input.queries.map(search));
    return {
      type: 'search_results',
      results: perQuery.flat(),
    };
  },
};

export default webSearchAction;

View File

@@ -0,0 +1,231 @@
import z from 'zod';
import {
ActionConfig,
ActionOutput,
ResearcherInput,
ResearcherOutput,
} from '../types';
import { ActionRegistry } from './actions';
import { getResearcherPrompt } from '@/lib/prompts/search/researcher';
import SessionManager from '@/lib/session';
import { ReasoningResearchBlock } from '@/lib/types';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
/**
 * Iterative research loop: repeatedly asks the LLM to pick the next action
 * (e.g. web_search or done), executes it, and accumulates findings for the
 * writer stage. Progress is streamed to the client as a single 'research'
 * block whose subSteps array is patched in place as reasoning/searching/
 * reading steps appear.
 */
class Researcher {
  /**
   * Runs the action-selection loop until the LLM emits a 'done' action or
   * the mode-dependent iteration budget is exhausted.
   *
   * @param session - Streaming session used to emit and patch UI blocks.
   * @param input - Chat history, follow-up, classifier output, and config.
   * @returns The structured outputs of every executed action (`findings`).
   */
  async research(
    session: SessionManager,
    input: ResearcherInput,
  ): Promise<ResearcherOutput> {
    // Plain-text log of prior iterations, fed back to the LLM as
    // <previous_actions> so it can see what it already tried.
    let findings: string = '';
    // Structured outputs of each executed action (returned to the caller).
    let actionOutput: ActionOutput[] = [];
    // Iteration budget by mode: speed = 1, balanced = 3, otherwise
    // (quality) = 25.
    let maxIteration =
      input.config.mode === 'speed'
        ? 1
        : input.config.mode === 'balanced'
          ? 3
          : 25;
    const availableActions = ActionRegistry.getAvailableActions({
      classification: input.classification,
    });
    // The LLM must return its reasoning plus exactly one action matching one
    // of the enabled action schemas.
    const schema = z.object({
      reasoning: z
        .string()
        .describe('The reasoning behind choosing the next action.'),
      action: z
        .union(availableActions.map((a) => a.schema))
        .describe('The action to be performed next.'),
    });
    const availableActionsDescription =
      ActionRegistry.getAvailableActionsDescriptions({
        classification: input.classification,
      });
    // Single 'research' UI block; its subSteps list is mutated and re-sent
    // via JSON-patch style updates for the whole loop.
    const researchBlockId = crypto.randomUUID();
    session.emitBlock({
      id: researchBlockId,
      type: 'research',
      data: {
        subSteps: [],
      },
    });
    for (let i = 0; i < maxIteration; i++) {
      const researcherPrompt = getResearcherPrompt(
        availableActionsDescription,
        input.config.mode,
        i,
        maxIteration,
      );
      // Stream the structured action choice so partial reasoning text can be
      // shown to the user while it is still being generated.
      const actionStream = input.config.llm.streamObject<
        z.infer<typeof schema>
      >({
        messages: [
          {
            role: 'system',
            content: researcherPrompt,
          },
          {
            role: 'user',
            content: `
<conversation>
${formatChatHistoryAsString(input.chatHistory.slice(-10))}
User: ${input.followUp} (Standalone question: ${input.classification.standaloneFollowUp})
</conversation>
<previous_actions>
${findings}
</previous_actions>
`,
          },
        ],
        schema,
      });
      const block = session.getBlock(researchBlockId);
      // Tracks whether a reasoning sub-step has been created for this
      // iteration; subsequent chunks update it in place by id.
      let reasoningEmitted = false;
      let reasoningId = crypto.randomUUID();
      let finalActionRes: any;
      for await (const partialRes of actionStream) {
        try {
          if (
            partialRes.reasoning &&
            !reasoningEmitted &&
            block &&
            block.type === 'research'
          ) {
            // First chunk with reasoning: append a new reasoning sub-step.
            reasoningEmitted = true;
            block.data.subSteps.push({
              id: reasoningId,
              type: 'reasoning',
              reasoning: partialRes.reasoning,
            });
            session.updateBlock(researchBlockId, [
              {
                op: 'replace',
                path: '/data/subSteps',
                value: block.data.subSteps,
              },
            ]);
          } else if (
            partialRes.reasoning &&
            reasoningEmitted &&
            block &&
            block.type === 'research'
          ) {
            // Later chunks: overwrite the existing sub-step with the longer
            // partial reasoning text.
            const subStepIndex = block.data.subSteps.findIndex(
              (step: any) => step.id === reasoningId,
            );
            if (subStepIndex !== -1) {
              const subStep = block.data.subSteps[
                subStepIndex
              ] as ReasoningResearchBlock;
              subStep.reasoning = partialRes.reasoning;
              session.updateBlock(researchBlockId, [
                {
                  op: 'replace',
                  path: '/data/subSteps',
                  value: block.data.subSteps,
                },
              ]);
            }
          }
          // Keep the most recent partial; after the stream ends this holds
          // the final parsed object.
          finalActionRes = partialRes;
        } catch (e) {
          // Malformed mid-stream partials are ignored; later chunks usually
          // repair them.
          // nothing
        }
      }
      // NOTE(review): if the stream yields no partials, finalActionRes stays
      // undefined and the next line throws — confirm streamObject always
      // emits at least one object.
      if (finalActionRes.action.type === 'done') {
        break;
      }
      const actionConfig: ActionConfig = {
        type: finalActionRes.action.type as string,
        params: finalActionRes.action,
      };
      // NOTE(review): assumes any non-done action has a `queries` array
      // (true for web_search) — revisit if actions without queries are added.
      const queries = actionConfig.params.queries || [];
      if (block && block.type === 'research') {
        block.data.subSteps.push({
          id: crypto.randomUUID(),
          type: 'searching',
          searching: queries,
        });
        session.updateBlock(researchBlockId, [
          { op: 'replace', path: '/data/subSteps', value: block.data.subSteps },
        ]);
      }
      // Record this iteration in the plain-text findings log before
      // executing, so failures still leave a trace for the next prompt.
      findings += `\n---\nIteration ${i + 1}:\n`;
      findings += 'Reasoning: ' + finalActionRes.reasoning + '\n';
      findings += `Executing Action: ${actionConfig.type} with params ${JSON.stringify(actionConfig.params)}\n`;
      const actionResult = await ActionRegistry.execute(
        actionConfig.type,
        actionConfig.params,
        {
          llm: input.config.llm,
          embedding: input.config.embedding,
          session: session,
        },
      );
      actionOutput.push(actionResult);
      if (actionResult.type === 'search_results') {
        // Surface the fetched documents as a 'reading' sub-step.
        if (block && block.type === 'research') {
          block.data.subSteps.push({
            id: crypto.randomUUID(),
            type: 'reading',
            reading: actionResult.results,
          });
          session.updateBlock(researchBlockId, [
            {
              op: 'replace',
              path: '/data/subSteps',
              value: block.data.subSteps,
            },
          ]);
        }
        findings += actionResult.results
          .map(
            (r) =>
              `Title: ${r.metadata.title}\nURL: ${r.metadata.url}\nContent: ${r.content}\n`,
          )
          .join('\n');
      }
      findings += '\n---------\n';
    }
    // Emit every collected source once, after the loop finishes.
    const searchResults = actionOutput.filter(
      (a) => a.type === 'search_results',
    );
    session.emit('data', {
      type: 'sources',
      data: searchResults
        .flatMap((a) => a.results)
        .map((r) => ({
          content: r.content,
          metadata: r.metadata,
        })),
    });
    return {
      findings: actionOutput,
    };
  }
}

export default Researcher;

View File

@@ -0,0 +1,107 @@
import z from 'zod';
import BaseLLM from '../../models/base/llm';
import BaseEmbedding from '@/lib/models/base/embedding';
import SessionManager from '@/lib/session';
import { ChatTurnMessage, Chunk } from '@/lib/types';
/** Search source categories the agent can draw from. */
export type SearchSources = 'web' | 'discussions' | 'academic';

/** Per-request configuration for the search agent. */
export type SearchAgentConfig = {
  sources: SearchSources[];
  llm: BaseLLM<any>;
  embedding: BaseEmbedding<any>;
  // Iteration-budget selector used by the researcher: speed < balanced < quality.
  mode: 'speed' | 'balanced' | 'quality';
};

/** Input for one search turn. */
export type SearchAgentInput = {
  chatHistory: ChatTurnMessage[];
  followUp: string;
  config: SearchAgentConfig;
};

/** A classifier intent (e.g. web_search, widget_response, writing_task). */
export interface Intent {
  name: string;
  description: string;
  requiresSearch: boolean;
  // Whether this intent is offered given the enabled sources.
  enabled: (config: { sources: SearchSources[] }) => boolean;
}

/** A UI widget executable from classifier output (weather, stock, …). */
export type Widget<TSchema extends z.ZodObject<any> = z.ZodObject<any>> = {
  name: string;
  description: string;
  schema: TSchema;
  execute: (
    params: z.infer<TSchema>,
    additionalConfig: AdditionalConfig,
  ) => Promise<WidgetOutput>;
};

/** A widget invocation as emitted by the classifier. */
export type WidgetConfig = {
  type: string;
  params: Record<string, any>;
};

/** Result of running a widget; `data` shape is widget-specific. */
export type WidgetOutput = {
  type: string;
  data: any;
};

export type ClassifierInput = {
  llm: BaseLLM<any>;
  enabledSources: SearchSources[];
  query: string;
  chatHistory: ChatTurnMessage[];
};

export type ClassifierOutput = {
  // True when widgets alone can answer and the research loop is skipped.
  skipSearch: boolean;
  // The follow-up rewritten as a standalone question.
  standaloneFollowUp: string;
  intents: string[];
  widgets: WidgetConfig[];
};

/** Shared runtime handles passed to widgets and actions. */
export type AdditionalConfig = {
  llm: BaseLLM<any>;
  embedding: BaseEmbedding<any>;
  session: SessionManager;
};

export type ResearcherInput = {
  chatHistory: ChatTurnMessage[];
  followUp: string;
  classification: ClassifierOutput;
  config: SearchAgentConfig;
};

export type ResearcherOutput = {
  findings: ActionOutput[];
};

export type SearchActionOutput = {
  type: 'search_results';
  results: Chunk[];
};

export type DoneActionOutput = {
  type: 'done';
};

/** Discriminated union over every action result (tag: `type`). */
export type ActionOutput = SearchActionOutput | DoneActionOutput;

/** A research-loop action (web_search, done, …). */
export interface ResearchAction<
  TSchema extends z.ZodObject<any> = z.ZodObject<any>,
> {
  name: string;
  description: string;
  // Was `z.ZodObject<any>`, which ignored the declared type parameter;
  // tying it to TSchema keeps `schema` and the `execute` params in sync for
  // typed implementations, while untyped uses still default to ZodObject<any>.
  schema: TSchema;
  enabled: (config: { classification: ClassifierOutput }) => boolean;
  execute: (
    params: z.infer<TSchema>,
    additionalConfig: AdditionalConfig,
  ) => Promise<ActionOutput>;
}

/** An action invocation chosen by the researcher LLM. */
export type ActionConfig = {
  type: string;
  params: Record<string, any>;
};

View File

@@ -0,0 +1,65 @@
import z from 'zod';
import { Widget } from '../types';
import { evaluate as mathEval } from 'mathjs';
// Input schema for the calculator widget.
const schema = z.object({
  type: z.literal('calculation'),
  expression: z
    .string()
    .describe(
      "A valid mathematical expression to be evaluated (e.g., '2 + 2', '3 * (4 + 5)').",
    ),
});

/**
 * Calculator widget: evaluates a mathjs expression and returns the result.
 * Evaluation failures are returned as data rather than thrown, so a bad
 * expression never breaks the surrounding widget pipeline.
 *
 * NOTE(review): the widget is registered as 'calculation' but the classifier
 * prompt's example uses type 'calculator', and the description suggests
 * "25% of 480", which mathjs cannot parse — confirm and align upstream.
 */
const calculationWidget: Widget<typeof schema> = {
  name: 'calculation',
  description: `Performs mathematical calculations and evaluates mathematical expressions. Supports arithmetic operations, algebraic equations, functions, and complex mathematical computations.
**What it provides:**
- Evaluates mathematical expressions and returns computed results
- Handles basic arithmetic (+, -, *, /)
- Supports functions (sqrt, sin, cos, log, etc.)
- Can process complex expressions with parentheses and order of operations
**When to use:**
- User asks to calculate, compute, or evaluate a mathematical expression
- Questions like "what is X", "calculate Y", "how much is Z" where X/Y/Z are math expressions
- Any request involving numbers and mathematical operations
**Example call:**
{
"type": "calculation",
"expression": "25% of 480"
}
{
"type": "calculation",
"expression": "sqrt(144) + 5 * 2"
}
**Important:** The expression must be valid mathematical syntax that can be evaluated by mathjs. Format percentages as "0.25 * 480" or "25% of 480". Do not include currency symbols, units, or non-mathematical text in the expression.`,
  schema: schema,
  execute: async (params, _) => {
    try {
      const result = mathEval(params.expression);
      return {
        type: 'calculation_result',
        data: {
          expression: params.expression,
          result: result,
        },
      };
    } catch (error) {
      return {
        type: 'calculation_result',
        data: {
          expression: params.expression,
          // Narrow the unknown catch value: interpolating a bare non-Error
          // object would previously yield "[object Object]".
          result: `Error evaluating expression: ${
            error instanceof Error ? error.message : String(error)
          }`,
        },
      };
    }
  },
};

export default calculationWidget;

View File

@@ -0,0 +1,10 @@
import calculationWidget from './calculationWidget';
import WidgetRegistry from './registry';
import weatherWidget from './weatherWidget';
import stockWidget from './stockWidget';
// Populate the registry at module load time; importing this file is what
// makes the widgets available to the classifier and search agent.
WidgetRegistry.register(weatherWidget);
WidgetRegistry.register(calculationWidget);
WidgetRegistry.register(stockWidget);

export { WidgetRegistry };

View File

@@ -0,0 +1,65 @@
import {
AdditionalConfig,
SearchAgentConfig,
Widget,
WidgetConfig,
WidgetOutput,
} from '../types';
/**
 * Static registry of UI widgets. Widgets self-register at module load
 * (see ./index.ts) and are executed from classifier output.
 */
class WidgetRegistry {
  // Keyed by widget name; registering the same name again replaces it.
  private static widgets = new Map<string, Widget>();

  /** Adds (or replaces) a widget under its declared name. */
  static register(widget: Widget<any>) {
    this.widgets.set(widget.name, widget);
  }

  /** Looks up a single widget by name. */
  static get(name: string): Widget | undefined {
    return this.widgets.get(name);
  }

  /** All registered widgets, in registration order. */
  static getAll(): Widget[] {
    return Array.from(this.widgets.values());
  }

  /** Renders every widget as a prompt-ready "name: description" list. */
  static getDescriptions(): string {
    return this.getAll()
      .map((widget) => `${widget.name}: ${widget.description}`)
      .join('\n\n');
  }

  /**
   * Executes a single widget by name.
   * @throws Error when no widget with that name is registered.
   */
  static async execute(
    name: string,
    params: any,
    config: AdditionalConfig,
  ): Promise<WidgetOutput> {
    const widget = this.get(name);
    if (!widget) {
      throw new Error(`Widget with name ${name} not found`);
    }
    return widget.execute(params, config);
  }

  /**
   * Executes several widgets concurrently. Results are returned in the same
   * order as `widgets` — the previous push-on-completion implementation
   * ordered them by completion time, which was nondeterministic.
   */
  static async executeAll(
    widgets: WidgetConfig[],
    additionalConfig: AdditionalConfig,
  ): Promise<WidgetOutput[]> {
    return Promise.all(
      widgets.map((widgetConfig) =>
        this.execute(widgetConfig.type, widgetConfig.params, additionalConfig),
      ),
    );
  }
}

export default WidgetRegistry;

View File

@@ -0,0 +1,412 @@
import z from 'zod';
import { Widget } from '../types';
import YahooFinance from 'yahoo-finance2';
// Single shared Yahoo Finance client; suppress the survey notice in logs.
const yf = new YahooFinance({
  suppressNotices: ['yahooSurvey'],
});

// Input schema for the stock widget.
// NOTE(review): the description calls comparisonTickers "Optional" but the
// field is not marked .optional(), so the model must always emit an array
// (possibly empty) — confirm this matches real classifier output.
const schema = z.object({
  type: z.literal('stock'),
  ticker: z
    .string()
    .describe(
      "The stock ticker symbol in uppercase (e.g., 'AAPL' for Apple Inc., 'TSLA' for Tesla, 'GOOGL' for Google). Use the primary exchange ticker.",
    ),
  comparisonTickers: z
    .array(z.string())
    .max(3)
    .describe(
      "Optional array of up to 3 ticker symbols to compare against the base ticker (e.g., ['MSFT', 'GOOGL', 'META']). Charts will show percentage change comparison.",
    ),
});
/**
 * Stock widget: fetches a real-time quote plus seven chart windows
 * (1D … MAX) for the base ticker and, optionally, up to three comparison
 * tickers. Failed chart windows degrade to null and a failed comparison
 * ticker is dropped, so the widget never throws to the caller.
 */
const stockWidget: Widget<typeof schema> = {
  name: 'stock',
  description: `Provides comprehensive real-time stock market data and financial information for any publicly traded company. Returns detailed quote data, market status, trading metrics, and company fundamentals.
You can set skipSearch to true if the stock widget can fully answer the user's query without needing additional web search.
**What it provides:**
- **Real-time Price Data**: Current price, previous close, open price, day's range (high/low)
- **Market Status**: Whether market is currently open or closed, trading sessions
- **Trading Metrics**: Volume, average volume, bid/ask prices and sizes
- **Performance**: Price changes (absolute and percentage), 52-week high/low range
- **Valuation**: Market capitalization, P/E ratio, earnings per share (EPS)
- **Dividends**: Dividend rate, dividend yield, ex-dividend date
- **Company Info**: Full company name, exchange, currency, sector/industry (when available)
- **Advanced Metrics**: Beta, trailing/forward P/E, book value, price-to-book ratio
- **Charts Data**: Historical price movements for visualization
- **Comparison**: Compare up to 3 stocks side-by-side with percentage-based performance visualization
**When to use:**
- User asks about a stock price ("What's AAPL stock price?", "How is Tesla doing?")
- Questions about company market performance ("Is Microsoft up or down today?")
- Requests for stock market data, trading info, or company valuation
- Queries about dividends, P/E ratio, market cap, or other financial metrics
- Any stock/equity-related question for a specific company
- Stock comparisons ("Compare AAPL vs MSFT", "How is TSLA doing vs RIVN and LCID?")
**Example calls:**
{
"type": "stock",
"ticker": "AAPL"
}
{
"type": "stock",
"ticker": "TSLA",
"comparisonTickers": ["RIVN", "LCID"]
}
{
"type": "stock",
"ticker": "GOOGL",
"comparisonTickers": ["MSFT", "META", "AMZN"]
}
**Important:**
- Use the correct ticker symbol (uppercase preferred: AAPL not aapl)
- For companies with multiple share classes, use the most common one (e.g., GOOGL for Google Class A shares)
- The widget works for stocks listed on major exchanges (NYSE, NASDAQ, etc.)
- Returns comprehensive data; the UI will display relevant metrics based on availability
- Market data may be delayed by 15-20 minutes for free data sources during trading hours`,
  schema: schema,
  execute: async (params, _) => {
    const DAY_MS = 24 * 60 * 60 * 1000;

    // The seven chart windows every ticker (base and comparison) is fetched
    // with, in the fixed order the UI expects. Centralizing them here
    // replaces two hand-copied 7-request blocks from the original.
    const chartRanges: {
      key: '1D' | '5D' | '1M' | '3M' | '6M' | '1Y' | 'MAX';
      opts: { period1: Date; period2?: Date; interval: string };
    }[] = [
      {
        key: '1D',
        opts: {
          period1: new Date(Date.now() - 2 * DAY_MS),
          period2: new Date(),
          interval: '5m',
        },
      },
      {
        key: '5D',
        opts: {
          period1: new Date(Date.now() - 6 * DAY_MS),
          period2: new Date(),
          interval: '15m',
        },
      },
      {
        key: '1M',
        opts: { period1: new Date(Date.now() - 30 * DAY_MS), interval: '1d' },
      },
      {
        key: '3M',
        opts: { period1: new Date(Date.now() - 90 * DAY_MS), interval: '1d' },
      },
      {
        key: '6M',
        opts: { period1: new Date(Date.now() - 180 * DAY_MS), interval: '1d' },
      },
      {
        key: '1Y',
        opts: { period1: new Date(Date.now() - 365 * DAY_MS), interval: '1d' },
      },
      {
        key: 'MAX',
        opts: {
          period1: new Date(Date.now() - 10 * 365 * DAY_MS),
          interval: '1wk',
        },
      },
    ];

    // Fetch all seven windows in parallel; a failed window becomes null.
    const fetchCharts = (symbol: string) =>
      Promise.all(
        chartRanges.map(({ opts }) =>
          yf.chart(symbol, opts as any).catch(() => null),
        ),
      );

    // Reduce a raw chart response to the { timestamps, prices } series the
    // UI consumes (or null when the window failed to load).
    const toSeries = (chart: any) =>
      chart
        ? {
            timestamps: chart.quotes.map((q: any) => q.date.getTime()),
            prices: chart.quotes.map((q: any) => q.close),
          }
        : null;

    // Zip the ordered chart results back into a keyed map ('1D' … 'MAX').
    const toChartData = (charts: any[]) =>
      Object.fromEntries(
        chartRanges.map(({ key }, i) => [key, toSeries(charts[i])]),
      );

    try {
      const ticker = params.ticker.toUpperCase();
      const quote: any = await yf.quote(ticker);
      const charts = await fetchCharts(ticker);
      if (!quote) {
        throw new Error(`No data found for ticker: ${ticker}`);
      }

      // Defensive default: the prompt describes comparisonTickers as
      // optional even though the schema requires it — the original crashed
      // with "cannot read 'length' of undefined" when it was absent.
      const comparisonTickers = params.comparisonTickers ?? [];
      let comparisonData: any = null;
      if (comparisonTickers.length > 0) {
        const compResults = await Promise.all(
          comparisonTickers.slice(0, 3).map(async (compTicker) => {
            try {
              const compQuote = await yf.quote(compTicker);
              return {
                ticker: compTicker,
                name: compQuote.shortName || compTicker,
                charts: await fetchCharts(compTicker),
              };
            } catch (error) {
              // A failed comparison ticker is logged and dropped, not fatal.
              console.error(
                `Failed to fetch comparison ticker ${compTicker}:`,
                error,
              );
              return null;
            }
          }),
        );
        comparisonData = compResults.filter((r) => r !== null);
      }

      const stockData = {
        symbol: quote.symbol,
        shortName: quote.shortName || quote.longName || ticker,
        longName: quote.longName,
        exchange: quote.fullExchangeName || quote.exchange,
        currency: quote.currency,
        quoteType: quote.quoteType,
        marketState: quote.marketState,
        regularMarketTime: quote.regularMarketTime,
        postMarketTime: quote.postMarketTime,
        preMarketTime: quote.preMarketTime,
        regularMarketPrice: quote.regularMarketPrice,
        regularMarketChange: quote.regularMarketChange,
        regularMarketChangePercent: quote.regularMarketChangePercent,
        regularMarketPreviousClose: quote.regularMarketPreviousClose,
        regularMarketOpen: quote.regularMarketOpen,
        regularMarketDayHigh: quote.regularMarketDayHigh,
        regularMarketDayLow: quote.regularMarketDayLow,
        postMarketPrice: quote.postMarketPrice,
        postMarketChange: quote.postMarketChange,
        postMarketChangePercent: quote.postMarketChangePercent,
        preMarketPrice: quote.preMarketPrice,
        preMarketChange: quote.preMarketChange,
        preMarketChangePercent: quote.preMarketChangePercent,
        regularMarketVolume: quote.regularMarketVolume,
        averageDailyVolume3Month: quote.averageDailyVolume3Month,
        averageDailyVolume10Day: quote.averageDailyVolume10Day,
        bid: quote.bid,
        bidSize: quote.bidSize,
        ask: quote.ask,
        askSize: quote.askSize,
        fiftyTwoWeekLow: quote.fiftyTwoWeekLow,
        fiftyTwoWeekHigh: quote.fiftyTwoWeekHigh,
        fiftyTwoWeekChange: quote.fiftyTwoWeekChange,
        fiftyTwoWeekChangePercent: quote.fiftyTwoWeekChangePercent,
        marketCap: quote.marketCap,
        trailingPE: quote.trailingPE,
        forwardPE: quote.forwardPE,
        priceToBook: quote.priceToBook,
        bookValue: quote.bookValue,
        earningsPerShare: quote.epsTrailingTwelveMonths,
        epsForward: quote.epsForward,
        dividendRate: quote.dividendRate,
        dividendYield: quote.dividendYield,
        exDividendDate: quote.exDividendDate,
        trailingAnnualDividendRate: quote.trailingAnnualDividendRate,
        trailingAnnualDividendYield: quote.trailingAnnualDividendYield,
        beta: quote.beta,
        fiftyDayAverage: quote.fiftyDayAverage,
        fiftyDayAverageChange: quote.fiftyDayAverageChange,
        fiftyDayAverageChangePercent: quote.fiftyDayAverageChangePercent,
        twoHundredDayAverage: quote.twoHundredDayAverage,
        twoHundredDayAverageChange: quote.twoHundredDayAverageChange,
        twoHundredDayAverageChangePercent:
          quote.twoHundredDayAverageChangePercent,
        sector: quote.sector,
        industry: quote.industry,
        website: quote.website,
        chartData: toChartData(charts),
        comparisonData: comparisonData
          ? comparisonData.map((comp: any) => ({
              ticker: comp.ticker,
              name: comp.name,
              chartData: toChartData(comp.charts),
            }))
          : null,
      };

      return {
        type: 'stock',
        data: stockData,
      };
    } catch (error: any) {
      // Surface fetch/parse failures as data so the pipeline keeps running.
      return {
        type: 'stock',
        data: {
          error: `Error fetching stock data: ${error.message || error}`,
          ticker: params.ticker,
        },
      };
    }
  },
};

export default stockWidget;

View File

@@ -0,0 +1,174 @@
import z from 'zod';
import { Widget } from '../types';
// Input schema for the weather widget. The model must supply EITHER a
// location name (with lat/lon set to 0) OR coordinates (with location '') —
// the either/or contract is enforced in the execute function, not here.
const WeatherWidgetSchema = z.object({
  type: z.literal('weather'),
  location: z
    .string()
    .describe(
      'Human-readable location name (e.g., "New York, NY, USA", "London, UK"). Use this OR lat/lon coordinates, never both. Leave empty string if providing coordinates.',
    ),
  lat: z
    .number()
    .describe(
      'Latitude coordinate in decimal degrees (e.g., 40.7128). Only use when location name is empty.',
    ),
  lon: z
    .number()
    .describe(
      'Longitude coordinate in decimal degrees (e.g., -74.0060). Only use when location name is empty.',
    ),
});
// Both Nominatim endpoints require an identifying User-Agent per their
// usage policy; the same headers are reused for every request below.
const WEATHER_REQUEST_HEADERS = {
  'User-Agent': 'Perplexica',
  'Content-Type': 'application/json',
};

// Builds the Open-Meteo forecast URL for the given coordinates. Kept as a
// single helper so the name-based and coordinate-based paths always request
// the exact same fields and therefore return the same response shape.
const buildForecastUrl = (lat: string | number, lon: string | number) =>
  `https://api.open-meteo.com/v1/forecast?latitude=${lat}&longitude=${lon}&current=temperature_2m,relative_humidity_2m,apparent_temperature,is_day,precipitation,rain,showers,snowfall,weather_code,cloud_cover,pressure_msl,surface_pressure,wind_speed_10m,wind_direction_10m,wind_gusts_10m&hourly=temperature_2m,precipitation_probability,precipitation,weather_code&daily=weather_code,temperature_2m_max,temperature_2m_min,precipitation_sum,precipitation_probability_max&timezone=auto&forecast_days=7`;

// Trims the hourly series down to the next 24 hours (Open-Meteo returns
// parallel arrays; each is sliced to the same length).
const sliceHourlyTo24h = (hourly: any) => ({
  time: hourly.time.slice(0, 24),
  temperature_2m: hourly.temperature_2m.slice(0, 24),
  precipitation_probability: hourly.precipitation_probability.slice(0, 24),
  precipitation: hourly.precipitation.slice(0, 24),
  weather_code: hourly.weather_code.slice(0, 24),
});

/**
 * Weather widget: resolves a location (forward geocoding via Nominatim when
 * a name is given, reverse geocoding when coordinates are given) and fetches
 * current conditions plus 24h hourly and 7-day daily forecasts from
 * Open-Meteo. Errors are caught and returned inside `data.error` rather
 * than thrown, so the caller always receives a `{ type: 'weather', data }`
 * result.
 */
const weatherWidget: Widget<typeof WeatherWidgetSchema> = {
  name: 'weather',
  description: `Provides comprehensive current weather information and forecasts for any location worldwide. Returns real-time weather data including temperature, conditions, humidity, wind, and multi-day forecasts.
You can set skipSearch to true if the weather widget can fully answer the user's query without needing additional web search.
**What it provides:**
- Current weather conditions (temperature, feels-like, humidity, precipitation)
- Wind speed, direction, and gusts
- Weather codes/conditions (clear, cloudy, rainy, etc.)
- Hourly forecast for next 24 hours
- Daily forecast for next 7 days (high/low temps, precipitation probability)
- Timezone information
**When to use:**
- User asks about weather in a location ("weather in X", "is it raining in Y")
- Questions about temperature, conditions, or forecast
- Any weather-related query for a specific place
**Example call:**
{
"type": "weather",
"location": "San Francisco, CA, USA",
"lat": 0,
"lon": 0
}
**Important:** Provide EITHER a location name OR latitude/longitude coordinates, never both. If using location name, set lat/lon to 0. Location should be specific (city, state/region, country) for best results.`,
  schema: WeatherWidgetSchema,
  execute: async (params, _) => {
    try {
      // Guard: at least one of the two addressing modes must be usable.
      if (
        params.location === '' &&
        (params.lat === undefined || params.lon === undefined)
      ) {
        throw new Error(
          'Either location name or both latitude and longitude must be provided.',
        );
      }

      if (params.location !== '') {
        // Name-based path: forward-geocode the name to coordinates first.
        const openStreetMapUrl = `https://nominatim.openstreetmap.org/search?q=${encodeURIComponent(params.location)}&format=json&limit=1`;
        const locationRes = await fetch(openStreetMapUrl, {
          headers: WEATHER_REQUEST_HEADERS,
        });
        if (!locationRes.ok) {
          throw new Error(
            `Geocoding request failed with status ${locationRes.status}`,
          );
        }

        const data = await locationRes.json();
        const location = data[0];

        if (!location) {
          throw new Error(
            `Could not find coordinates for location: ${params.location}`,
          );
        }

        const weatherRes = await fetch(
          buildForecastUrl(location.lat, location.lon),
          { headers: WEATHER_REQUEST_HEADERS },
        );
        if (!weatherRes.ok) {
          throw new Error(
            `Weather request failed with status ${weatherRes.status}`,
          );
        }

        const weatherData = await weatherRes.json();

        return {
          type: 'weather',
          data: {
            location: params.location,
            latitude: location.lat,
            longitude: location.lon,
            current: weatherData.current,
            hourly: sliceHourlyTo24h(weatherData.hourly),
            daily: weatherData.daily,
            timezone: weatherData.timezone,
          },
        };
      } else if (params.lat !== undefined && params.lon !== undefined) {
        // Coordinate path: fetch the forecast and reverse-geocode a
        // display name in parallel — the two requests are independent.
        const [weatherRes, locationRes] = await Promise.all([
          fetch(buildForecastUrl(params.lat, params.lon), {
            headers: WEATHER_REQUEST_HEADERS,
          }),
          fetch(
            `https://nominatim.openstreetmap.org/reverse?lat=${params.lat}&lon=${params.lon}&format=json`,
            { headers: WEATHER_REQUEST_HEADERS },
          ),
        ]);
        if (!weatherRes.ok) {
          throw new Error(
            `Weather request failed with status ${weatherRes.status}`,
          );
        }
        if (!locationRes.ok) {
          throw new Error(
            `Reverse geocoding request failed with status ${locationRes.status}`,
          );
        }

        const weatherData = await weatherRes.json();
        const locationData = await locationRes.json();

        return {
          type: 'weather',
          data: {
            location: locationData.display_name,
            latitude: params.lat,
            longitude: params.lon,
            current: weatherData.current,
            hourly: sliceHourlyTo24h(weatherData.hourly),
            daily: weatherData.daily,
            timezone: weatherData.timezone,
          },
        };
      }

      // Unreachable given the guard above (lat/lon are required by the
      // schema); kept as a safe fallback.
      return {
        type: 'weather',
        data: null,
      };
    } catch (err) {
      // Surface failures to the caller instead of throwing.
      return {
        type: 'weather',
        data: {
          error: `Error fetching weather data: ${err}`,
        },
      };
    }
  },
};
export default weatherWidget;

View File

@@ -0,0 +1,39 @@
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
import { suggestionGeneratorPrompt } from '@/lib/prompts/suggestions';
import { ChatTurnMessage } from '@/lib/types';
import z from 'zod';
import BaseLLM from '@/lib/models/base/llm';

type SuggestionGeneratorInput = {
  chatHistory: ChatTurnMessage[];
};

// Structured-output schema: forces the model to return a plain list of
// follow-up suggestions rather than free-form text.
const schema = z.object({
  suggestions: z
    .array(z.string())
    .describe('List of suggested questions or prompts'),
});

/**
 * Generates follow-up suggestions for the given chat history by asking the
 * LLM for a schema-validated object.
 *
 * @param input - The chat history to base the suggestions on.
 * @param llm - Any model implementation that supports `generateObject`.
 * @returns The list of suggested questions/prompts.
 */
const generateSuggestions = async (
  input: SuggestionGeneratorInput,
  llm: BaseLLM<any>,
) => {
  const res = await llm.generateObject<z.infer<typeof schema>>({
    messages: [
      {
        role: 'system',
        content: suggestionGeneratorPrompt,
      },
      {
        // History is wrapped in an XML tag so the prompt can reference it
        // unambiguously.
        content: `<chat_history>\n${formatChatHistoryAsString(input.chatHistory)}\n</chat_history>`,
        role: 'user',
      },
    ],
    schema,
  });

  return res.suggestions;
};

export default generateSuggestions;

View File

@@ -1,105 +0,0 @@
import {
RunnableSequence,
RunnableMap,
RunnableLambda,
} from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';
// System prompt: instructs the LLM to rewrite the follow-up question into a
// standalone image-search query, emitted inside a <query> XML element.
const imageSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;
// Chain input: prior conversation plus the user's follow-up question.
type ImageSearchChainInput = {
  chat_history: BaseMessage[];
  query: string;
};
// Shape of a single image result returned to the caller.
interface ImageSearchResult {
  img_src: string;
  url: string;
  title: string;
}
// Converts the raw LLM message into a plain string for the query parser.
const strParser = new StringOutputParser();
/**
 * Builds the image-search pipeline: format the conversation, have the LLM
 * rewrite the follow-up into a standalone query, extract that query from the
 * <query> tag, then search SearXNG image engines and return at most ten
 * well-formed results.
 */
const createImageSearchChain = (llm: BaseChatModel) => {
  const prepareInputs = RunnableMap.from({
    chat_history: (input: ImageSearchChainInput) =>
      formatChatHistoryAsString(input.chat_history),
    query: (input: ImageSearchChainInput) => input.query,
  });

  // Few-shot prompt teaching the query-rephrasing format.
  const prompt = ChatPromptTemplate.fromMessages([
    ['system', imageSearchChainPrompt],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
    ],
    ['assistant', '<query>A cat</query>'],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
    ],
    ['assistant', '<query>Car working</query>'],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
    ],
    ['assistant', '<query>AC working</query>'],
    [
      'user',
      '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
    ],
  ]);

  // Pulls the rewritten query out of the LLM's <query> element.
  const extractQuery = RunnableLambda.from(async (raw: string) => {
    const queryParser = new LineOutputParser({ key: 'query' });
    return await queryParser.parse(raw);
  });

  // Runs the actual image search and keeps only complete results.
  const searchImages = RunnableLambda.from(async (searchQuery: string) => {
    const res = await searchSearxng(searchQuery, {
      engines: ['bing images', 'google images'],
    });

    const images: ImageSearchResult[] = res.results.flatMap((result) =>
      result.img_src && result.url && result.title
        ? [
            {
              img_src: result.img_src,
              url: result.url,
              title: result.title,
            },
          ]
        : [],
    );

    return images.slice(0, 10);
  });

  return RunnableSequence.from([
    prepareInputs,
    prompt,
    llm,
    strParser,
    extractQuery,
    searchImages,
  ]);
};
const handleImageSearch = (
input: ImageSearchChainInput,
llm: BaseChatModel,
) => {
const imageSearchChain = createImageSearchChain(llm);
return imageSearchChain.invoke(input);
};
export default handleImageSearch;

View File

@@ -1,55 +0,0 @@
import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
import ListLineOutputParser from '../outputParsers/listLineOutputParser';
import { PromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';
// Prompt asking the model for 4-5 follow-up suggestions, returned one per
// line between <suggestions> tags. `{chat_history}` is filled in by the
// PromptTemplate in the chain below.
const suggestionGeneratorPrompt = `
You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
Make sure the suggestions are medium in length and are informative and relevant to the conversation.
Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:
<suggestions>
Tell me more about SpaceX and their recent projects
What is the latest news on SpaceX?
Who is the CEO of SpaceX?
</suggestions>
Conversation:
{chat_history}
`;
// Chain input: the conversation to derive suggestions from.
type SuggestionGeneratorInput = {
  chat_history: BaseMessage[];
};
// Parses the newline-separated list inside the <suggestions> tag.
const outputParser = new ListLineOutputParser({
  key: 'suggestions',
});
/**
 * Assembles the suggestion pipeline: format the chat history, fill the
 * prompt template, run the LLM, and parse the <suggestions> list.
 */
const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
  const historyMap = RunnableMap.from({
    chat_history: (input: SuggestionGeneratorInput) =>
      formatChatHistoryAsString(input.chat_history),
  });

  return RunnableSequence.from([
    historyMap,
    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
    llm,
    outputParser,
  ]);
};
/**
 * Generates follow-up suggestions for the given conversation.
 *
 * NOTE(review): this mutates the caller's `llm` by forcing temperature to 0
 * via an unchecked `as unknown as ChatOpenAI` double cast — presumably to
 * make suggestion output deterministic. Confirm every caller passes an
 * OpenAI-compatible model and tolerates the side effect.
 */
const generateSuggestions = (
  input: SuggestionGeneratorInput,
  llm: BaseChatModel,
) => {
  // Unchecked cast; see NOTE in the doc comment above.
  (llm as unknown as ChatOpenAI).temperature = 0;
  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
  return suggestionGeneratorChain.invoke(input);
};
export default generateSuggestions;

View File

@@ -1,110 +0,0 @@
import {
RunnableSequence,
RunnableMap,
RunnableLambda,
} from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';
// System prompt: instructs the LLM to rewrite the follow-up question into a
// standalone YouTube-search query, emitted inside a <query> XML element.
const videoSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;
// Chain input: prior conversation plus the user's follow-up question.
type VideoSearchChainInput = {
  chat_history: BaseMessage[];
  query: string;
};
// Shape of a single video result; iframe_src allows inline embedding.
interface VideoSearchResult {
  img_src: string;
  url: string;
  title: string;
  iframe_src: string;
}
// Converts the raw LLM message into a plain string for the query parser.
const strParser = new StringOutputParser();
/**
 * Builds the video-search pipeline: format the conversation, have the LLM
 * rewrite the follow-up into a standalone query, extract that query from the
 * <query> tag, then search SearXNG's YouTube engine and return at most ten
 * results that carry a thumbnail, URL, title, and embeddable iframe source.
 */
const createVideoSearchChain = (llm: BaseChatModel) => {
  const prepareInputs = RunnableMap.from({
    chat_history: (input: VideoSearchChainInput) =>
      formatChatHistoryAsString(input.chat_history),
    query: (input: VideoSearchChainInput) => input.query,
  });

  // Few-shot prompt teaching the query-rephrasing format.
  const prompt = ChatPromptTemplate.fromMessages([
    ['system', videoSearchChainPrompt],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
    ],
    ['assistant', '<query>How does a car work?</query>'],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
    ],
    ['assistant', '<query>Theory of relativity</query>'],
    [
      'user',
      '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
    ],
    ['assistant', '<query>AC working</query>'],
    [
      'user',
      '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
    ],
  ]);

  // Pulls the rewritten query out of the LLM's <query> element.
  const extractQuery = RunnableLambda.from(async (raw: string) => {
    const queryParser = new LineOutputParser({ key: 'query' });
    return await queryParser.parse(raw);
  });

  // Runs the actual video search and keeps only complete results.
  const searchVideos = RunnableLambda.from(async (searchQuery: string) => {
    const res = await searchSearxng(searchQuery, {
      engines: ['youtube'],
    });

    const videos: VideoSearchResult[] = res.results.flatMap((result) =>
      result.thumbnail && result.url && result.title && result.iframe_src
        ? [
            {
              img_src: result.thumbnail,
              url: result.url,
              title: result.title,
              iframe_src: result.iframe_src,
            },
          ]
        : [],
    );

    return videos.slice(0, 10);
  });

  return RunnableSequence.from([
    prepareInputs,
    prompt,
    llm,
    strParser,
    extractQuery,
    searchVideos,
  ]);
};
const handleVideoSearch = (
input: VideoSearchChainInput,
llm: BaseChatModel,
) => {
const videoSearchChain = createVideoSearchChain(llm);
return videoSearchChain.invoke(input);
};
export default handleVideoSearch;

View File

@@ -6,11 +6,8 @@ const getClientConfig = (key: string, defaultVal?: any) => {
export const getTheme = () => getClientConfig('theme', 'dark'); export const getTheme = () => getClientConfig('theme', 'dark');
export const getAutoImageSearch = () => export const getAutoMediaSearch = () =>
Boolean(getClientConfig('autoImageSearch', 'true')); getClientConfig('autoMediaSearch', 'true') === 'true';
export const getAutoVideoSearch = () =>
Boolean(getClientConfig('autoVideoSearch', 'true'));
export const getSystemInstructions = () => export const getSystemInstructions = () =>
getClientConfig('systemInstructions', ''); getClientConfig('systemInstructions', '');

View File

@@ -13,14 +13,15 @@ class ConfigManager {
currentConfig: Config = { currentConfig: Config = {
version: this.configVersion, version: this.configVersion,
setupComplete: false, setupComplete: false,
general: {}, preferences: {},
personalization: {},
modelProviders: [], modelProviders: [],
search: { search: {
searxngURL: '', searxngURL: '',
}, },
}; };
uiConfigSections: UIConfigSections = { uiConfigSections: UIConfigSections = {
general: [ preferences: [
{ {
name: 'Theme', name: 'Theme',
key: 'theme', key: 'theme',
@@ -40,6 +41,46 @@ class ConfigManager {
default: 'dark', default: 'dark',
scope: 'client', scope: 'client',
}, },
{
name: 'Measurement Unit',
key: 'measureUnit',
type: 'select',
options: [
{
name: 'Imperial',
value: 'Imperial',
},
{
name: 'Metric',
value: 'Metric',
},
],
required: false,
description: 'Choose between Metric and Imperial measurement unit.',
default: 'Metric',
scope: 'client',
},
{
name: 'Auto video & image search',
key: 'autoMediaSearch',
type: 'switch',
required: false,
description: 'Automatically search for relevant images and videos.',
default: true,
scope: 'client',
},
],
personalization: [
{
name: 'System Instructions',
key: 'systemInstructions',
type: 'textarea',
required: false,
description: 'Add custom behavior or tone for the model.',
placeholder:
'e.g., "Respond in a friendly and concise tone" or "Use British English and format answers as bullet points."',
scope: 'client',
},
], ],
modelProviders: [], modelProviders: [],
search: [ search: [
@@ -124,7 +165,7 @@ class ConfigManager {
providerConfigSections.forEach((provider) => { providerConfigSections.forEach((provider) => {
const newProvider: ConfigModelProvider & { required?: string[] } = { const newProvider: ConfigModelProvider & { required?: string[] } = {
id: crypto.randomUUID(), id: crypto.randomUUID(),
name: `${provider.name} ${Math.floor(Math.random() * 1000)}`, name: `${provider.name}`,
type: provider.key, type: provider.key,
chatModels: [], chatModels: [],
embeddingModels: [], embeddingModels: [],

View File

@@ -32,10 +32,23 @@ type PasswordUIConfigField = BaseUIConfigField & {
default?: string; default?: string;
}; };
type TextareaUIConfigField = BaseUIConfigField & {
type: 'textarea';
placeholder?: string;
default?: string;
};
type SwitchUIConfigField = BaseUIConfigField & {
type: 'switch';
default?: boolean;
};
type UIConfigField = type UIConfigField =
| StringUIConfigField | StringUIConfigField
| SelectUIConfigField | SelectUIConfigField
| PasswordUIConfigField; | PasswordUIConfigField
| TextareaUIConfigField
| SwitchUIConfigField;
type ConfigModelProvider = { type ConfigModelProvider = {
id: string; id: string;
@@ -50,7 +63,10 @@ type ConfigModelProvider = {
type Config = { type Config = {
version: number; version: number;
setupComplete: boolean; setupComplete: boolean;
general: { preferences: {
[key: string]: any;
};
personalization: {
[key: string]: any; [key: string]: any;
}; };
modelProviders: ConfigModelProvider[]; modelProviders: ConfigModelProvider[];
@@ -73,7 +89,8 @@ type ModelProviderUISection = {
}; };
type UIConfigSections = { type UIConfigSections = {
general: UIConfigField[]; preferences: UIConfigField[];
personalization: UIConfigField[];
modelProviders: ModelProviderUISection[]; modelProviders: ModelProviderUISection[];
search: UIConfigField[]; search: UIConfigField[];
}; };
@@ -87,4 +104,6 @@ export type {
StringUIConfigField, StringUIConfigField,
ModelProviderUISection, ModelProviderUISection,
ConfigModelProvider, ConfigModelProvider,
TextareaUIConfigField,
SwitchUIConfigField,
}; };

View File

@@ -18,12 +18,18 @@ db.exec(`
`); `);
function sanitizeSql(content: string) { function sanitizeSql(content: string) {
return content const statements = content
.split(/--> statement-breakpoint/g)
.map((stmt) =>
stmt
.split(/\r?\n/) .split(/\r?\n/)
.filter( .filter((l) => !l.trim().startsWith('-->'))
(l) => !l.trim().startsWith('-->') && !l.includes('statement-breakpoint'), .join('\n')
.trim(),
) )
.join('\n'); .filter((stmt) => stmt.length > 0);
return statements;
} }
fs.readdirSync(migrationsFolder) fs.readdirSync(migrationsFolder)
@@ -32,7 +38,7 @@ fs.readdirSync(migrationsFolder)
.forEach((file) => { .forEach((file) => {
const filePath = path.join(migrationsFolder, file); const filePath = path.join(migrationsFolder, file);
let content = fs.readFileSync(filePath, 'utf-8'); let content = fs.readFileSync(filePath, 'utf-8');
content = sanitizeSql(content); const statements = sanitizeSql(content);
const migrationName = file.split('_')[0] || file; const migrationName = file.split('_')[0] || file;
@@ -108,7 +114,12 @@ fs.readdirSync(migrationsFolder)
db.exec('DROP TABLE messages;'); db.exec('DROP TABLE messages;');
db.exec('ALTER TABLE messages_with_sources RENAME TO messages;'); db.exec('ALTER TABLE messages_with_sources RENAME TO messages;');
} else { } else {
db.exec(content); // Execute each statement separately
statements.forEach((stmt) => {
if (stmt.trim()) {
db.exec(stmt);
}
});
} }
db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run( db.prepare('INSERT OR IGNORE INTO ran_migrations (name) VALUES (?)').run(

View File

@@ -1,26 +1,23 @@
import { sql } from 'drizzle-orm'; import { sql } from 'drizzle-orm';
import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core'; import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core';
import { Document } from 'langchain/document'; import { Block } from '../types';
export const messages = sqliteTable('messages', { export const messages = sqliteTable('messages', {
id: integer('id').primaryKey(), id: integer('id').primaryKey(),
role: text('type', { enum: ['assistant', 'user', 'source'] }).notNull(),
chatId: text('chatId').notNull(),
createdAt: text('createdAt')
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
messageId: text('messageId').notNull(), messageId: text('messageId').notNull(),
chatId: text('chatId').notNull(),
content: text('content'), backendId: text('backendId').notNull(),
query: text('query').notNull(),
sources: text('sources', { createdAt: text('createdAt').notNull(),
mode: 'json', responseBlocks: text('responseBlocks', { mode: 'json' })
}) .$type<Block[]>()
.$type<Document[]>()
.default(sql`'[]'`), .default(sql`'[]'`),
status: text({ enum: ['answering', 'completed', 'error'] }).default(
'answering',
),
}); });
interface File { interface DBFile {
name: string; name: string;
fileId: string; fileId: string;
} }
@@ -31,6 +28,6 @@ export const chats = sqliteTable('chats', {
createdAt: text('createdAt').notNull(), createdAt: text('createdAt').notNull(),
focusMode: text('focusMode').notNull(), focusMode: text('focusMode').notNull(),
files: text('files', { mode: 'json' }) files: text('files', { mode: 'json' })
.$type<File[]>() .$type<DBFile[]>()
.default(sql`'[]'`), .default(sql`'[]'`),
}); });

View File

@@ -1,13 +1,7 @@
'use client'; 'use client';
import { import { Message } from '@/components/ChatWindow';
AssistantMessage, import { Block } from '@/lib/types';
ChatTurn,
Message,
SourceMessage,
SuggestionMessage,
UserMessage,
} from '@/components/ChatWindow';
import { import {
createContext, createContext,
useContext, useContext,
@@ -17,24 +11,25 @@ import {
useState, useState,
} from 'react'; } from 'react';
import crypto from 'crypto'; import crypto from 'crypto';
import { useSearchParams } from 'next/navigation'; import { useParams, useSearchParams } from 'next/navigation';
import { toast } from 'sonner'; import { toast } from 'sonner';
import { getSuggestions } from '../actions'; import { getSuggestions } from '../actions';
import { MinimalProvider } from '../models/types'; import { MinimalProvider } from '../models/types';
import { getAutoMediaSearch } from '../config/clientRegistry';
import { applyPatch } from 'rfc6902';
import { Widget } from '@/components/ChatWindow';
export type Section = { export type Section = {
userMessage: UserMessage; message: Message;
assistantMessage: AssistantMessage | undefined; widgets: Widget[];
parsedAssistantMessage: string | undefined; parsedTextBlocks: string[];
speechMessage: string | undefined; speechMessage: string;
sourceMessage: SourceMessage | undefined;
thinkingEnded: boolean; thinkingEnded: boolean;
suggestions?: string[]; suggestions?: string[];
}; };
type ChatContext = { type ChatContext = {
messages: Message[]; messages: Message[];
chatTurns: ChatTurn[];
sections: Section[]; sections: Section[];
chatHistory: [string, string][]; chatHistory: [string, string][];
files: File[]; files: File[];
@@ -48,6 +43,10 @@ type ChatContext = {
messageAppeared: boolean; messageAppeared: boolean;
isReady: boolean; isReady: boolean;
hasError: boolean; hasError: boolean;
chatModelProvider: ChatModelProvider;
embeddingModelProvider: EmbeddingModelProvider;
researchEnded: boolean;
setResearchEnded: (ended: boolean) => void;
setOptimizationMode: (mode: string) => void; setOptimizationMode: (mode: string) => void;
setFocusMode: (mode: string) => void; setFocusMode: (mode: string) => void;
setFiles: (files: File[]) => void; setFiles: (files: File[]) => void;
@@ -58,6 +57,8 @@ type ChatContext = {
rewrite?: boolean, rewrite?: boolean,
) => Promise<void>; ) => Promise<void>;
rewrite: (messageId: string) => void; rewrite: (messageId: string) => void;
setChatModelProvider: (provider: ChatModelProvider) => void;
setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void;
}; };
export interface File { export interface File {
@@ -90,17 +91,6 @@ const checkConfig = async (
'embeddingModelProviderId', 'embeddingModelProviderId',
); );
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (!autoImageSearch) {
localStorage.setItem('autoImageSearch', 'true');
}
if (!autoVideoSearch) {
localStorage.setItem('autoVideoSearch', 'false');
}
const res = await fetch(`/api/providers`, { const res = await fetch(`/api/providers`, {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@@ -210,18 +200,26 @@ const loadMessages = async (
setMessages(messages); setMessages(messages);
const chatTurns = messages.filter( const history: [string, string][] = [];
(msg): msg is ChatTurn => msg.role === 'user' || msg.role === 'assistant', messages.forEach((msg) => {
); history.push(['human', msg.query]);
const history = chatTurns.map((msg) => { const textBlocks = msg.responseBlocks
return [msg.role, msg.content]; .filter(
}) as [string, string][]; (block): block is Block & { type: 'text' } => block.type === 'text',
)
.map((block) => block.data)
.join('\n');
if (textBlocks) {
history.push(['assistant', textBlocks]);
}
});
console.debug(new Date(), 'app:messages_loaded'); console.debug(new Date(), 'app:messages_loaded');
if (chatTurns.length > 0) { if (messages.length > 0) {
document.title = chatTurns[0].content; document.title = messages[0].query;
} }
const files = data.chat.files.map((file: any) => { const files = data.chat.files.map((file: any) => {
@@ -252,34 +250,36 @@ export const chatContext = createContext<ChatContext>({
loading: false, loading: false,
messageAppeared: false, messageAppeared: false,
messages: [], messages: [],
chatTurns: [],
sections: [], sections: [],
notFound: false, notFound: false,
optimizationMode: '', optimizationMode: '',
chatModelProvider: { key: '', providerId: '' },
embeddingModelProvider: { key: '', providerId: '' },
researchEnded: false,
rewrite: () => {}, rewrite: () => {},
sendMessage: async () => {}, sendMessage: async () => {},
setFileIds: () => {}, setFileIds: () => {},
setFiles: () => {}, setFiles: () => {},
setFocusMode: () => {}, setFocusMode: () => {},
setOptimizationMode: () => {}, setOptimizationMode: () => {},
setChatModelProvider: () => {},
setEmbeddingModelProvider: () => {},
setResearchEnded: () => {},
}); });
export const ChatProvider = ({ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
children, const params: { chatId: string } = useParams();
id,
}: {
children: React.ReactNode;
id?: string;
}) => {
const searchParams = useSearchParams(); const searchParams = useSearchParams();
const initialMessage = searchParams.get('q'); const initialMessage = searchParams.get('q');
const [chatId, setChatId] = useState<string | undefined>(id); const [chatId, setChatId] = useState<string | undefined>(params.chatId);
const [newChatCreated, setNewChatCreated] = useState(false); const [newChatCreated, setNewChatCreated] = useState(false);
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const [messageAppeared, setMessageAppeared] = useState(false); const [messageAppeared, setMessageAppeared] = useState(false);
const [researchEnded, setResearchEnded] = useState(false);
const [chatHistory, setChatHistory] = useState<[string, string][]>([]); const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
const [messages, setMessages] = useState<Message[]>([]); const [messages, setMessages] = useState<Message[]>([]);
@@ -312,66 +312,44 @@ export const ChatProvider = ({
const messagesRef = useRef<Message[]>([]); const messagesRef = useRef<Message[]>([]);
const chatTurns = useMemo((): ChatTurn[] => {
return messages.filter(
(msg): msg is ChatTurn => msg.role === 'user' || msg.role === 'assistant',
);
}, [messages]);
const sections = useMemo<Section[]>(() => { const sections = useMemo<Section[]>(() => {
const sections: Section[] = []; return messages.map((msg) => {
const textBlocks: string[] = [];
messages.forEach((msg, i) => { let speechMessage = '';
if (msg.role === 'user') {
const nextUserMessageIndex = messages.findIndex(
(m, j) => j > i && m.role === 'user',
);
const aiMessage = messages.find(
(m, j) =>
j > i &&
m.role === 'assistant' &&
(nextUserMessageIndex === -1 || j < nextUserMessageIndex),
) as AssistantMessage | undefined;
const sourceMessage = messages.find(
(m, j) =>
j > i &&
m.role === 'source' &&
m.sources &&
(nextUserMessageIndex === -1 || j < nextUserMessageIndex),
) as SourceMessage | undefined;
let thinkingEnded = false; let thinkingEnded = false;
let processedMessage = aiMessage?.content ?? '';
let speechMessage = aiMessage?.content ?? '';
let suggestions: string[] = []; let suggestions: string[] = [];
if (aiMessage) { const sourceBlocks = msg.responseBlocks.filter(
(block): block is Block & { type: 'source' } => block.type === 'source',
);
const sources = sourceBlocks.flatMap((block) => block.data);
const widgetBlocks = msg.responseBlocks
.filter((b) => b.type === 'widget')
.map((b) => b.data) as Widget[];
msg.responseBlocks.forEach((block) => {
if (block.type === 'text') {
let processedText = block.data;
const citationRegex = /\[([^\]]+)\]/g; const citationRegex = /\[([^\]]+)\]/g;
const regex = /\[(\d+)\]/g; const regex = /\[(\d+)\]/g;
if (processedMessage.includes('<think>')) { if (processedText.includes('<think>')) {
const openThinkTag = const openThinkTag = processedText.match(/<think>/g)?.length || 0;
processedMessage.match(/<think>/g)?.length || 0;
const closeThinkTag = const closeThinkTag =
processedMessage.match(/<\/think>/g)?.length || 0; processedText.match(/<\/think>/g)?.length || 0;
if (openThinkTag && !closeThinkTag) { if (openThinkTag && !closeThinkTag) {
processedMessage += '</think> <a> </a>'; processedText += '</think> <a> </a>';
} }
} }
if (aiMessage.content.includes('</think>')) { if (block.data.includes('</think>')) {
thinkingEnded = true; thinkingEnded = true;
} }
if ( if (sources.length > 0) {
sourceMessage && processedText = processedText.replace(
sourceMessage.sources &&
sourceMessage.sources.length > 0
) {
processedMessage = processedMessage.replace(
citationRegex, citationRegex,
(_, capturedContent: string) => { (_, capturedContent: string) => {
const numbers = capturedContent const numbers = capturedContent
@@ -386,7 +364,7 @@ export const ChatProvider = ({
return `[${numStr}]`; return `[${numStr}]`;
} }
const source = sourceMessage.sources?.[number - 1]; const source = sources[number - 1];
const url = source?.metadata?.url; const url = source?.metadata?.url;
if (url) { if (url) {
@@ -400,37 +378,27 @@ export const ChatProvider = ({
return linksHtml; return linksHtml;
}, },
); );
speechMessage = aiMessage.content.replace(regex, ''); speechMessage += block.data.replace(regex, '');
} else { } else {
processedMessage = processedMessage.replace(regex, ''); processedText = processedText.replace(regex, '');
speechMessage = aiMessage.content.replace(regex, ''); speechMessage += block.data.replace(regex, '');
} }
const suggestionMessage = messages.find( textBlocks.push(processedText);
(m, j) => } else if (block.type === 'suggestion') {
j > i && suggestions = block.data;
m.role === 'suggestion' &&
(nextUserMessageIndex === -1 || j < nextUserMessageIndex),
) as SuggestionMessage | undefined;
if (suggestionMessage && suggestionMessage.suggestions.length > 0) {
suggestions = suggestionMessage.suggestions;
}
} }
});
sections.push({ return {
userMessage: msg, message: msg,
assistantMessage: aiMessage, parsedTextBlocks: textBlocks,
sourceMessage: sourceMessage,
parsedAssistantMessage: processedMessage,
speechMessage, speechMessage,
thinkingEnded, thinkingEnded,
suggestions: suggestions, suggestions,
widgets: widgetBlocks,
};
}); });
}
});
return sections;
}, [messages]); }, [messages]);
useEffect(() => { useEffect(() => {
@@ -443,6 +411,19 @@ export const ChatProvider = ({
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);
useEffect(() => {
if (params.chatId && params.chatId !== chatId) {
setChatId(params.chatId);
setMessages([]);
setChatHistory([]);
setFiles([]);
setFileIds([]);
setIsMessagesLoaded(false);
setNotFound(false);
setNewChatCreated(false);
}
}, [params.chatId, chatId]);
useEffect(() => { useEffect(() => {
if ( if (
chatId && chatId &&
@@ -466,7 +447,7 @@ export const ChatProvider = ({
setChatId(crypto.randomBytes(20).toString('hex')); setChatId(crypto.randomBytes(20).toString('hex'));
} }
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, [chatId, isMessagesLoaded, newChatCreated, messages.length]);
useEffect(() => { useEffect(() => {
messagesRef.current = messages; messagesRef.current = messages;
@@ -483,24 +464,17 @@ export const ChatProvider = ({
const rewrite = (messageId: string) => { const rewrite = (messageId: string) => {
const index = messages.findIndex((msg) => msg.messageId === messageId); const index = messages.findIndex((msg) => msg.messageId === messageId);
const chatTurnsIndex = chatTurns.findIndex(
(msg) => msg.messageId === messageId,
);
if (index === -1) return; if (index === -1) return;
const message = chatTurns[chatTurnsIndex - 1]; setMessages((prev) => prev.slice(0, index));
setMessages((prev) => {
return [
...prev.slice(0, messages.length > 2 ? messages.indexOf(message) : 0),
];
});
setChatHistory((prev) => { setChatHistory((prev) => {
return [...prev.slice(0, chatTurns.length > 2 ? chatTurnsIndex - 1 : 0)]; return prev.slice(0, index * 2);
}); });
sendMessage(message.content, message.messageId, true); const messageToRewrite = messages[index];
sendMessage(messageToRewrite.query, messageToRewrite.messageId, true);
}; };
useEffect(() => { useEffect(() => {
@@ -519,142 +493,215 @@ export const ChatProvider = ({
messageId, messageId,
rewrite = false, rewrite = false,
) => { ) => {
if (loading) return; if (loading || !message) return;
setLoading(true); setLoading(true);
setResearchEnded(false);
setMessageAppeared(false); setMessageAppeared(false);
if (messages.length <= 1) { if (messages.length <= 1) {
window.history.replaceState(null, '', `/c/${chatId}`); window.history.replaceState(null, '', `/c/${chatId}`);
} }
let recievedMessage = '';
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex'); messageId = messageId ?? crypto.randomBytes(7).toString('hex');
const backendId = crypto.randomBytes(20).toString('hex');
setMessages((prevMessages) => [ const newMessage: Message = {
...prevMessages, messageId,
{
content: message,
messageId: messageId,
chatId: chatId!, chatId: chatId!,
role: 'user', backendId,
query: message,
responseBlocks: [],
status: 'answering',
createdAt: new Date(), createdAt: new Date(),
}, };
]);
setMessages((prevMessages) => [...prevMessages, newMessage]);
const receivedTextRef = { current: '' };
const messageHandler = async (data: any) => { const messageHandler = async (data: any) => {
if (data.type === 'error') { if (data.type === 'error') {
toast.error(data.data); toast.error(data.data);
setLoading(false); setLoading(false);
setMessages((prev) =>
prev.map((msg) =>
msg.messageId === messageId
? { ...msg, status: 'error' as const }
: msg,
),
);
return; return;
} }
if (data.type === 'researchComplete') {
setResearchEnded(true);
if (
newMessage.responseBlocks.find(
(b) => b.type === 'source' && b.data.length > 0,
)
) {
setMessageAppeared(true);
}
}
if (data.type === 'block') {
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === messageId) {
return {
...msg,
responseBlocks: [...msg.responseBlocks, data.block],
};
}
return msg;
}),
);
}
if (data.type === 'updateBlock') {
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === messageId) {
const updatedBlocks = msg.responseBlocks.map((block) => {
if (block.id === data.blockId) {
const updatedBlock = { ...block };
applyPatch(updatedBlock, data.patch);
return updatedBlock;
}
return block;
});
return { ...msg, responseBlocks: updatedBlocks };
}
return msg;
}),
);
}
if (data.type === 'sources') { if (data.type === 'sources') {
setMessages((prevMessages) => [ const sourceBlock: Block = {
...prevMessages, id: crypto.randomBytes(7).toString('hex'),
{ type: 'source',
messageId: data.messageId, data: data.data,
chatId: chatId!, };
role: 'source',
sources: data.data, setMessages((prev) =>
createdAt: new Date(), prev.map((msg) => {
}, if (msg.messageId === messageId) {
]); return {
...msg,
responseBlocks: [...msg.responseBlocks, sourceBlock],
};
}
return msg;
}),
);
if (data.data.length > 0) { if (data.data.length > 0) {
setMessageAppeared(true); setMessageAppeared(true);
} }
} }
if (data.type === 'message') { if (data.type === 'message') {
if (!added) { receivedTextRef.current += data.data;
setMessages((prevMessages) => [
...prevMessages,
{
content: data.data,
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
createdAt: new Date(),
},
]);
added = true;
setMessageAppeared(true);
} else {
setMessages((prev) =>
prev.map((message) => {
if (
message.messageId === data.messageId &&
message.role === 'assistant'
) {
return { ...message, content: message.content + data.data };
}
return message; setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === messageId) {
const existingTextBlockIndex = msg.responseBlocks.findIndex(
(b) => b.type === 'text',
);
if (existingTextBlockIndex >= 0) {
const updatedBlocks = [...msg.responseBlocks];
const existingBlock = updatedBlocks[
existingTextBlockIndex
] as Block & { type: 'text' };
updatedBlocks[existingTextBlockIndex] = {
...existingBlock,
data: existingBlock.data + data.data,
};
return { ...msg, responseBlocks: updatedBlocks };
} else {
const textBlock: Block = {
id: crypto.randomBytes(7).toString('hex'),
type: 'text',
data: data.data,
};
return {
...msg,
responseBlocks: [...msg.responseBlocks, textBlock],
};
}
}
return msg;
}), }),
); );
} setMessageAppeared(true);
recievedMessage += data.data;
} }
if (data.type === 'messageEnd') { if (data.type === 'messageEnd') {
setChatHistory((prevHistory) => [ const newHistory: [string, string][] = [
...prevHistory, ...chatHistory,
['human', message], ['human', message],
['assistant', recievedMessage], ['assistant', receivedTextRef.current],
]); ];
setChatHistory(newHistory);
setMessages((prev) =>
prev.map((msg) =>
msg.messageId === messageId
? { ...msg, status: 'completed' as const }
: msg,
),
);
setLoading(false); setLoading(false);
const lastMsg = messagesRef.current[messagesRef.current.length - 1]; const lastMsg = messagesRef.current[messagesRef.current.length - 1];
const autoImageSearch = localStorage.getItem('autoImageSearch'); const autoMediaSearch = getAutoMediaSearch();
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (autoImageSearch === 'true') { if (autoMediaSearch) {
document document
.getElementById(`search-images-${lastMsg.messageId}`) .getElementById(`search-images-${lastMsg.messageId}`)
?.click(); ?.click();
}
if (autoVideoSearch === 'true') {
document document
.getElementById(`search-videos-${lastMsg.messageId}`) .getElementById(`search-videos-${lastMsg.messageId}`)
?.click(); ?.click();
} }
/* Check if there are sources after message id's index and no suggestions */ // Check if there are sources and no suggestions
const currentMsg = messagesRef.current.find(
const userMessageIndex = messagesRef.current.findIndex( (msg) => msg.messageId === messageId,
(msg) => msg.messageId === messageId && msg.role === 'user',
); );
const sourceMessage = messagesRef.current.find( const hasSourceBlocks = currentMsg?.responseBlocks.some(
(msg, i) => i > userMessageIndex && msg.role === 'source', (block) => block.type === 'source' && block.data.length > 0,
) as SourceMessage | undefined; );
const hasSuggestions = currentMsg?.responseBlocks.some(
const suggestionMessageIndex = messagesRef.current.findIndex( (block) => block.type === 'suggestion',
(msg, i) => i > userMessageIndex && msg.role === 'suggestion',
); );
if ( if (hasSourceBlocks && !hasSuggestions) {
sourceMessage && const suggestions = await getSuggestions(newHistory);
sourceMessage.sources.length > 0 && const suggestionBlock: Block = {
suggestionMessageIndex == -1 id: crypto.randomBytes(7).toString('hex'),
) { type: 'suggestion',
const suggestions = await getSuggestions(messagesRef.current); data: suggestions,
setMessages((prev) => { };
return [
...prev, setMessages((prev) =>
{ prev.map((msg) => {
role: 'suggestion', if (msg.messageId === messageId) {
suggestions: suggestions, return {
chatId: chatId!, ...msg,
createdAt: new Date(), responseBlocks: [...msg.responseBlocks, suggestionBlock],
messageId: crypto.randomBytes(7).toString('hex'), };
}, }
]; return msg;
}); }),
);
} }
} }
}; };
@@ -723,7 +770,6 @@ export const ChatProvider = ({
<chatContext.Provider <chatContext.Provider
value={{ value={{
messages, messages,
chatTurns,
sections, sections,
chatHistory, chatHistory,
files, files,
@@ -743,6 +789,12 @@ export const ChatProvider = ({
setOptimizationMode, setOptimizationMode,
rewrite, rewrite,
sendMessage, sendMessage,
setChatModelProvider,
chatModelProvider,
embeddingModelProvider,
setEmbeddingModelProvider,
researchEnded,
setResearchEnded,
}} }}
> >
{children} {children}

View File

@@ -1,78 +0,0 @@
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
import { chunkArray } from '@langchain/core/utils/chunk_array';
/**
 * Configuration for {@link HuggingFaceTransformersEmbeddings}.
 */
export interface HuggingFaceTransformersEmbeddingsParams
  extends EmbeddingsParams {
  /** Legacy alias for `model`; the constructor accepts either and keeps both in sync. */
  modelName: string;
  /** Model id to load, e.g. "Xenova/all-MiniLM-L6-v2". */
  model: string;
  /** Request timeout; presumably milliseconds — stored but not read by the visible implementation, confirm before relying on it. */
  timeout?: number;
  /** Number of texts embedded per batch in `embedDocuments`. */
  batchSize?: number;
  /** When true, newlines in inputs are replaced with spaces before embedding. */
  stripNewLines?: boolean;
}
/**
 * Embeddings implementation backed by a local `@huggingface/transformers`
 * feature-extraction pipeline (e.g. the Xenova/all-MiniLM-L6-v2 ONNX model).
 *
 * The pipeline is created lazily on first use and cached for the lifetime of
 * this instance, so the model is loaded from disk only once.
 */
export class HuggingFaceTransformersEmbeddings
  extends Embeddings
  implements HuggingFaceTransformersEmbeddingsParams
{
  modelName = 'Xenova/all-MiniLM-L6-v2';
  model = 'Xenova/all-MiniLM-L6-v2';
  batchSize = 512;
  stripNewLines = true;
  timeout?: number;

  // Cached pipeline creation; undefined until the first embedding call.
  private pipelinePromise: Promise<any> | undefined;

  constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {
    super(fields ?? {});

    // `model` wins over the legacy `modelName`; keep both fields in sync.
    this.modelName = fields?.model ?? fields?.modelName ?? this.model;
    this.model = this.modelName;
    // Fix: `batchSize` from fields was previously ignored.
    this.batchSize = fields?.batchSize ?? this.batchSize;
    this.stripNewLines = fields?.stripNewLines ?? this.stripNewLines;
    this.timeout = fields?.timeout;
  }

  /**
   * Embed many documents, batching requests by `batchSize`.
   *
   * @param texts - Documents to embed.
   * @returns One embedding vector per input text, in input order.
   */
  async embedDocuments(texts: string[]): Promise<number[][]> {
    const batches = chunkArray(
      this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
      this.batchSize,
    );

    const batchResponses = await Promise.all(
      batches.map((batch) => this.runEmbedding(batch)),
    );

    // Each batch response is a list of vectors; flatten one level to get
    // a single vector list in input order.
    return batchResponses.flat();
  }

  /**
   * Embed a single query string.
   *
   * @param text - Query to embed.
   * @returns The embedding vector for the query.
   */
  async embedQuery(text: string): Promise<number[]> {
    const data = await this.runEmbedding([
      this.stripNewLines ? text.replace(/\n/g, ' ') : text,
    ]);
    return data[0];
  }

  // Runs the feature-extraction pipeline on a batch of texts, creating and
  // caching the pipeline on first use. Mean pooling + L2 normalization
  // matches the sentence-transformers convention for these models.
  private async runEmbedding(texts: string[]) {
    if (!this.pipelinePromise) {
      // Fix: previously the pipeline was rebuilt on every call even though
      // `pipelinePromise` existed for caching, reloading the model each time.
      this.pipelinePromise = import('@huggingface/transformers').then(
        ({ pipeline }) => pipeline('feature-extraction', this.model),
      );
    }
    const pipe = await this.pipelinePromise;

    return this.caller.call(async () => {
      const output = await pipe(texts, { pooling: 'mean', normalize: true });
      return output.tolist();
    });
  }
}

View File

@@ -0,0 +1,9 @@
import { Chunk } from '@/lib/types';
/**
 * Abstract base for embedding model backends.
 *
 * CONFIG is the provider-specific configuration shape (API key, model name,
 * base URL, …) captured at construction time.
 */
abstract class BaseEmbedding<CONFIG> {
  constructor(protected config: CONFIG) {}

  // Embed raw strings; implementations return one vector per input text.
  abstract embedText(texts: string[]): Promise<number[][]>;

  // Embed document chunks; implementations return one vector per chunk.
  abstract embedChunks(chunks: Chunk[]): Promise<number[][]>;
}

export default BaseEmbedding;

View File

@@ -0,0 +1,22 @@
import {
GenerateObjectInput,
GenerateOptions,
GenerateTextInput,
GenerateTextOutput,
StreamTextOutput,
} from '../types';
/**
 * Abstract base for chat LLM backends.
 *
 * CONFIG is the provider-specific configuration shape captured at
 * construction time.
 */
abstract class BaseLLM<CONFIG> {
  constructor(protected config: CONFIG) {}

  // Merges the given generation options into this instance and returns it.
  abstract withOptions(options: GenerateOptions): this;

  // Single-shot text generation.
  abstract generateText(input: GenerateTextInput): Promise<GenerateTextOutput>;

  // Incremental text generation; yields output chunks as they arrive.
  abstract streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput>;

  // Generate a structured object validated against the input's schema.
  abstract generateObject<T>(input: GenerateObjectInput): Promise<T>;

  // Stream a structured object; yields partial snapshots (Partial<T>).
  abstract streamObject<T>(
    input: GenerateObjectInput,
  ): AsyncGenerator<Partial<T>>;
}

export default BaseLLM;

View File

@@ -1,7 +1,7 @@
import { Embeddings } from '@langchain/core/embeddings'; import { ModelList, ProviderMetadata } from '../types';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import { UIConfigField } from '@/lib/config/types'; import { UIConfigField } from '@/lib/config/types';
import BaseLLM from './llm';
import BaseEmbedding from './embedding';
abstract class BaseModelProvider<CONFIG> { abstract class BaseModelProvider<CONFIG> {
constructor( constructor(
@@ -11,8 +11,8 @@ abstract class BaseModelProvider<CONFIG> {
) {} ) {}
abstract getDefaultModels(): Promise<ModelList>; abstract getDefaultModels(): Promise<ModelList>;
abstract getModelList(): Promise<ModelList>; abstract getModelList(): Promise<ModelList>;
abstract loadChatModel(modelName: string): Promise<BaseChatModel>; abstract loadChatModel(modelName: string): Promise<BaseLLM<any>>;
abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>; abstract loadEmbeddingModel(modelName: string): Promise<BaseEmbedding<any>>;
static getProviderConfigFields(): UIConfigField[] { static getProviderConfigFields(): UIConfigField[] {
throw new Error('Method not implemented.'); throw new Error('Method not implemented.');
} }

View File

@@ -1,13 +1,11 @@
import { ModelProviderUISection } from '@/lib/config/types'; import { ModelProviderUISection } from '@/lib/config/types';
import { ProviderConstructor } from './baseProvider'; import { ProviderConstructor } from '../base/provider';
import OpenAIProvider from './openai'; import OpenAIProvider from './openai';
import OllamaProvider from './ollama'; import OllamaProvider from './ollama';
import TransformersProvider from './transformers';
export const providers: Record<string, ProviderConstructor<any>> = { export const providers: Record<string, ProviderConstructor<any>> = {
openai: OpenAIProvider, openai: OpenAIProvider,
ollama: OllamaProvider, ollama: OllamaProvider,
transformers: TransformersProvider,
}; };
export const getModelProvidersUIConfigSection = export const getModelProvidersUIConfigSection =

View File

@@ -1,10 +1,11 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOllama, OllamaEmbeddings } from '@langchain/ollama';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types'; import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry'; import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import BaseModelProvider from '../../base/provider';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseLLM from '../../base/llm';
import BaseEmbedding from '../../base/embedding';
import OllamaLLM from './ollamaLLM';
import OllamaEmbedding from './ollamaEmbedding';
interface OllamaConfig { interface OllamaConfig {
baseURL: string; baseURL: string;
@@ -76,7 +77,7 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
}; };
} }
async loadChatModel(key: string): Promise<BaseChatModel> { async loadChatModel(key: string): Promise<BaseLLM<any>> {
const modelList = await this.getModelList(); const modelList = await this.getModelList();
const exists = modelList.chat.find((m) => m.key === key); const exists = modelList.chat.find((m) => m.key === key);
@@ -87,14 +88,13 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
); );
} }
return new ChatOllama({ return new OllamaLLM({
temperature: 0.7, baseURL: this.config.baseURL,
model: key, model: key,
baseUrl: this.config.baseURL,
}); });
} }
async loadEmbeddingModel(key: string): Promise<Embeddings> { async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
const modelList = await this.getModelList(); const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key); const exists = modelList.embedding.find((m) => m.key === key);
@@ -104,9 +104,9 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
); );
} }
return new OllamaEmbeddings({ return new OllamaEmbedding({
model: key, model: key,
baseUrl: this.config.baseURL, baseURL: this.config.baseURL,
}); });
} }

View File

@@ -0,0 +1,40 @@
import { Ollama } from 'ollama';
import BaseEmbedding from '../../base/embedding';
import { Chunk } from '@/lib/types';
/** Connection settings for an Ollama embedding model. */
type OllamaConfig = {
  model: string;
  baseURL?: string;
};

/**
 * Embedding backend that delegates to an Ollama server's embed API.
 */
class OllamaEmbedding extends BaseEmbedding<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);
    // Fall back to the standard local Ollama port when no host is configured.
    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  /** Embed plain strings; returns one vector per input text. */
  async embedText(texts: string[]): Promise<number[][]> {
    const { embeddings } = await this.ollamaClient.embed({
      input: texts,
      model: this.config.model,
    });
    return embeddings;
  }

  /** Embed document chunks by their textual content. */
  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    return this.embedText(chunks.map((chunk) => chunk.content));
  }
}

export default OllamaEmbedding;

View File

@@ -0,0 +1,153 @@
import z from 'zod';
import BaseLLM from '../../base/llm';
import {
GenerateObjectInput,
GenerateOptions,
GenerateTextInput,
GenerateTextOutput,
StreamTextOutput,
} from '../../types';
import { Ollama } from 'ollama';
import { parse } from 'partial-json';
/** Connection and generation settings for an Ollama-backed chat model. */
type OllamaConfig = {
  baseURL: string;
  model: string;
  options?: GenerateOptions;
};

/**
 * Chat LLM implementation backed by an Ollama server.
 *
 * Structured-object generation constrains decoding with a JSON schema
 * derived from the provided zod schema; streaming variants incrementally
 * parse the partial JSON as it arrives.
 */
class OllamaLLM extends BaseLLM<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);
    // Fall back to the standard local Ollama port when no host is configured.
    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  /** Merge additional generation options into this instance's config. */
  withOptions(options: GenerateOptions) {
    this.config.options = {
      ...this.config.options,
      ...options,
    };
    return this;
  }

  // Maps the provider-agnostic GenerateOptions onto Ollama request options.
  // Shared by all four generation methods to keep the mapping consistent.
  private buildRequestOptions() {
    return {
      top_p: this.config.options?.topP,
      temperature: this.config.options?.temperature,
      num_predict: this.config.options?.maxTokens,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      stop: this.config.options?.stopSequences,
    };
  }

  /**
   * Single-shot text generation.
   *
   * @returns The full response content plus any model "thinking" trace.
   */
  async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
    this.withOptions(input.options || {});

    const res = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      options: {
        ...this.buildRequestOptions(),
        num_ctx: 32000,
      },
    });

    return {
      content: res.message.content,
      additionalInfo: {
        reasoning: res.message.thinking,
      },
    };
  }

  /** Incremental text generation; yields chunks as they arrive. */
  async *streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput> {
    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      stream: true,
      options: {
        ...this.buildRequestOptions(),
        num_ctx: 32000,
      },
    });

    for await (const chunk of stream) {
      yield {
        contentChunk: chunk.message.content,
        done: chunk.done,
        additionalInfo: {
          reasoning: chunk.message.thinking,
        },
      };
    }
  }

  /**
   * Generate a structured object validated against `input.schema`.
   *
   * @throws Error when the model's output is not valid JSON for the schema.
   */
  async generateObject<T>(input: GenerateObjectInput): Promise<T> {
    this.withOptions(input.options || {});

    const response = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      think: false,
      options: {
        ...this.buildRequestOptions(),
        // Fix: honor a caller-supplied temperature; it was previously
        // hard-coded to 0.7 even when options requested another value.
        temperature: this.config.options?.temperature ?? 0.7,
      },
    });

    try {
      return input.schema.parse(JSON.parse(response.message.content)) as T;
    } catch (err) {
      throw new Error(`Error parsing response from Ollama: ${err}`);
    }
  }

  /**
   * Stream a structured object; yields progressively more complete partial
   * snapshots parsed from the accumulated JSON text. Signature aligned with
   * the BaseLLM contract (Partial<T>), which matches what is actually yielded.
   */
  async *streamObject<T>(
    input: GenerateObjectInput,
  ): AsyncGenerator<Partial<T>> {
    let receivedObj: string = '';

    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      stream: true,
      think: false,
      options: {
        ...this.buildRequestOptions(),
        // Fix: honor a caller-supplied temperature (was hard-coded to 0.7).
        temperature: this.config.options?.temperature ?? 0.7,
      },
    });

    for await (const chunk of stream) {
      receivedObj += chunk.message.content;

      try {
        yield parse(receivedObj) as Partial<T>;
      } catch (err) {
        // Best-effort: an unparsable prefix yields an empty partial rather
        // than aborting the stream.
        console.log('Error parsing partial object from Ollama:', err);
        yield {} as Partial<T>;
      }
    }
  }
}

export default OllamaLLM;

View File

@@ -1,10 +1,11 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types'; import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry'; import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import OpenAIEmbedding from './openaiEmbedding';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import OpenAILLM from './openaiLLM';
interface OpenAIConfig { interface OpenAIConfig {
apiKey: string; apiKey: string;
@@ -145,7 +146,7 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
}; };
} }
async loadChatModel(key: string): Promise<BaseChatModel> { async loadChatModel(key: string): Promise<BaseLLM<any>> {
const modelList = await this.getModelList(); const modelList = await this.getModelList();
const exists = modelList.chat.find((m) => m.key === key); const exists = modelList.chat.find((m) => m.key === key);
@@ -156,17 +157,14 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
); );
} }
return new ChatOpenAI({ return new OpenAILLM({
apiKey: this.config.apiKey, apiKey: this.config.apiKey,
temperature: 0.7,
model: key, model: key,
configuration: {
baseURL: this.config.baseURL, baseURL: this.config.baseURL,
},
}); });
} }
async loadEmbeddingModel(key: string): Promise<Embeddings> { async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
const modelList = await this.getModelList(); const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key); const exists = modelList.embedding.find((m) => m.key === key);
@@ -176,12 +174,10 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
); );
} }
return new OpenAIEmbeddings({ return new OpenAIEmbedding({
apiKey: this.config.apiKey, apiKey: this.config.apiKey,
model: key, model: key,
configuration: {
baseURL: this.config.baseURL, baseURL: this.config.baseURL,
},
}); });
} }

View File

@@ -0,0 +1,42 @@
import OpenAI from 'openai';
import BaseEmbedding from '../../base/embedding';
import { Chunk } from '@/lib/types';
/** Settings for an OpenAI (or OpenAI-compatible) embeddings endpoint. */
type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
};

/**
 * Embedding backend using the OpenAI embeddings API. A custom `baseURL`
 * allows pointing at any OpenAI-compatible server.
 */
class OpenAIEmbedding extends BaseEmbedding<OpenAIConfig> {
  openAIClient: OpenAI;

  constructor(protected config: OpenAIConfig) {
    super(config);
    this.openAIClient = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL,
    });
  }

  /** Embed plain strings; returns one vector per input text, in order. */
  async embedText(texts: string[]): Promise<number[][]> {
    const response = await this.openAIClient.embeddings.create({
      model: this.config.model,
      input: texts,
    });
    return response.data.map((item) => item.embedding);
  }

  /** Embed document chunks by their textual content. */
  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    return this.embedText(chunks.map((chunk) => chunk.content));
  }
}

export default OpenAIEmbedding;

Some files were not shown because too many files have changed in this diff Show More