Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2026-01-11 20:25:41 +00:00)

Comparing commits: 6016090f12...v1.12.1 (118 commits)
@@ -11,33 +11,63 @@ Perplexica's codebase is organized as follows:
 - **UI Components and Pages**:
   - **Components (`src/components`)**: Reusable UI components.
   - **Pages and Routes (`src/app`)**: Next.js app directory structure with page components.
-    - Main app routes include: home (`/`), chat (`/c`), discover (`/discover`), library (`/library`), and settings (`/settings`).
-  - **API Routes (`src/app/api`)**: API endpoints implemented with Next.js API routes.
-    - `/api/chat`: Handles chat interactions.
-    - `/api/search`: Provides direct access to Perplexica's search capabilities.
-    - Other endpoints for models, files, and suggestions.
+    - Main app routes include: home (`/`), chat (`/c`), discover (`/discover`), and library (`/library`).
+  - **API Routes (`src/app/api`)**: Server endpoints implemented with Next.js route handlers.
 - **Backend Logic (`src/lib`)**: Contains all the backend functionality including search, database, and API logic.
-  - The search functionality is present inside `src/lib/search` directory.
-  - All of the focus modes are implemented using the Meta Search Agent class in `src/lib/search/metaSearchAgent.ts`.
+  - The search system lives in `src/lib/agents/search`.
+  - The search pipeline is split into classification, research, widgets, and writing.
   - Database functionality is in `src/lib/db`.
-  - Chat model and embedding model providers are managed in `src/lib/providers`.
-  - Prompt templates and LLM chain definitions are in `src/lib/prompts` and `src/lib/chains` respectively.
+  - Chat model and embedding model providers are in `src/lib/models/providers`, and models are loaded via `src/lib/models/registry.ts`.
+  - Prompt templates are in `src/lib/prompts`.
   - SearXNG integration is in `src/lib/searxng.ts`.
+  - Upload search lives in `src/lib/uploads`.
+
+### Where to make changes
+
+If you are not sure where to start, use this section as a map.
+
+- **Search behavior and reasoning**
+
+  - `src/lib/agents/search` contains the core chat and search pipeline.
+  - `classifier.ts` decides whether research is needed and what should run.
+  - `researcher/` gathers information in the background.
+
+- **Add or change a search capability**
+
+  - Research tools (web, academic, discussions, uploads, scraping) live in `src/lib/agents/search/researcher/actions`.
+  - Tools are registered in `src/lib/agents/search/researcher/actions/index.ts`.
+
+- **Add or change widgets**
+
+  - Widgets live in `src/lib/agents/search/widgets`.
+  - Widgets run in parallel with research and show structured results in the UI.
+
+- **Model integrations**
+
+  - Providers live in `src/lib/models/providers`.
+  - Add new providers there and wire them into the model registry so they show up in the app.
+
+- **Architecture docs**
+  - High level overview: `docs/architecture/README.md`
+  - High level flow: `docs/architecture/WORKING.md`
 
 ## API Documentation
 
-Perplexica exposes several API endpoints for programmatic access, including:
+Perplexica includes API documentation for programmatic access.
 
-- **Search API**: Access Perplexica's advanced search capabilities directly via the `/api/search` endpoint. For detailed documentation, see `docs/api/search.md`.
+- **Search API**: For detailed documentation, see `docs/API/SEARCH.md`.
 
 ## Setting Up Your Environment
 
 Before diving into coding, setting up your local environment is key. Here's what you need to do:
 
-1. In the root directory, locate the `sample.config.toml` file.
-2. Rename it to `config.toml` and fill in the necessary configuration fields.
-3. Run `npm install` to install all dependencies.
-4. Run `npm run db:migrate` to set up the local sqlite database.
-5. Use `npm run dev` to start the application in development mode.
+1. Run `npm install` to install all dependencies.
+2. Use `npm run dev` to start the application in development mode.
+3. Open http://localhost:3000 and complete the setup in the UI (API keys, models, search backend URL, etc.).
+
+Database migrations are applied automatically on startup.
+
+For full installation options (Docker and non Docker), see the installation guide in the repository README.
 
 **Please note**: Docker configurations are present for setting up production environments, whereas `npm run dev` is used for development purposes.
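To make the contributor map above concrete, here is a hedged sketch of what a research tool might look like. The `ResearchTool` shape, the `example` tool, and its registration are hypothetical illustrations only; the real interface lives in `src/lib/agents/search/researcher/actions` and may differ.

```ts
// Hypothetical sketch only - check src/lib/agents/search/researcher/actions
// and its index.ts for the actual tool interface and registration.
interface ResearchTool {
  name: string;
  description: string; // what the pipeline sees when deciding which tools to run
  run(query: string): Promise<{ content: string; url?: string }[]>;
}

// Example tool: wraps some search backend and returns normalized snippets.
const exampleTool: ResearchTool = {
  name: 'example',
  description: 'Searches an example backend for supporting documents.',
  async run(query) {
    const res = await fetch(
      `https://example.com/search?q=${encodeURIComponent(query)}`,
    );
    const results: { snippet: string; link: string }[] = await res.json();
    return results.map((r) => ({ content: r.snippet, url: r.link }));
  },
};

export default exampleTool;
```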
README.md (21 lines changed)
@@ -18,9 +18,11 @@ Want to know more about its architecture and how it works? You can read it [here
 🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, and more. Mix and match models based on your needs.
 
-⚡ **Smart search modes** - Choose Balanced Mode for everyday searches, Fast Mode when you need quick answers, or wait for Quality Mode (coming soon) for deep research.
+⚡ **Smart search modes** - Choose Speed Mode when you need quick answers, Balanced Mode for everyday searches, or Quality Mode for deep research.
 
-🎯 **Six specialized focus modes** - Get better results with modes designed for specific tasks: Academic papers, YouTube videos, Reddit discussions, Wolfram Alpha calculations, writing assistance, or general web search.
+🧭 **Pick your sources** - Search the web, discussions, or academic papers. More sources and integrations are in progress.
+
+🧩 **Widgets** - Helpful UI cards that show up when relevant, like weather, calculations, stock prices, and other quick lookups.
 
 🔍 **Web search powered by SearxNG** - Access multiple search engines while keeping your identity private. Support for Tavily and Exa coming soon for even better results.
@@ -81,7 +83,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 Perplexica can be easily run using Docker. Simply run the following command:
 
 ```bash
-docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data --name perplexica itzcrazykns1337/perplexica:latest
 ```
 
 This will pull and start the Perplexica container with the bundled SearxNG search engine. Once running, open your browser and navigate to http://localhost:3000. You can then configure your settings (API keys, models, etc.) directly in the setup screen.
@@ -93,7 +95,7 @@ This will pull and start the Perplexica container with the bundled SearxNG searc
 If you already have SearxNG running, you can use the slim version of Perplexica:
 
 ```bash
-docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
+docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data --name perplexica itzcrazykns1337/perplexica:slim-latest
 ```
 
 **Important**: Make sure your SearxNG instance has:
@@ -120,7 +122,7 @@ If you prefer to build from source or need more control:
 
 ```bash
 docker build -t perplexica .
-docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica perplexica
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data --name perplexica perplexica
 ```
 
 5. Access Perplexica at http://localhost:3000 and configure your settings in the setup screen.
@@ -237,13 +239,8 @@ Perplexica runs on Next.js and handles all API requests. It works right away on
 
 ## Upcoming Features
 
-- [x] Add settings page
-- [x] Adding support for local LLMs
-- [x] History Saving features
-- [x] Introducing various Focus Modes
-- [x] Adding API support
-- [x] Adding Discover
-- [ ] Finalizing Copilot Mode
+- [ ] Adding more widgets, integrations, search sources
+- [ ] Adding authentication
 
 ## Support Us
@@ -1,15 +1,14 @@
 services:
   perplexica:
+    image: itzcrazykns1337/perplexica:latest
     build:
       context: .
     ports:
       - '3000:3000'
     volumes:
       - data:/home/perplexica/data
-      - uploads:/home/perplexica/uploads
     restart: unless-stopped
 
 volumes:
   data:
-    name: 'perplexica-data'
-  uploads:
-    name: 'perplexica-uploads'
+    name: 'perplexica-data'
@@ -57,7 +57,7 @@ Use the `id` field as the `providerId` and the `key` field from the models array
 
 ### Request
 
-The API accepts a JSON object in the request body, where you define the focus mode, chat models, embedding models, and your query.
+The API accepts a JSON object in the request body, where you define the enabled search `sources`, chat models, embedding models, and your query.
 
 #### Request Body Structure
 
@@ -72,7 +72,7 @@ The API accepts a JSON object in the request body, where you define the focus mo
     "key": "text-embedding-3-large"
   },
   "optimizationMode": "speed",
-  "focusMode": "webSearch",
+  "sources": ["web"],
   "query": "What is Perplexica",
   "history": [
     ["human", "Hi, how are you?"],
@@ -87,24 +87,25 @@ The API accepts a JSON object in the request body, where you define the focus mo
 
 ### Request Parameters
 
-- **`chatModel`** (object, optional): Defines the chat model to be used for the query. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
+- **`chatModel`** (object, required): Defines the chat model to be used for the query. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
 
   - `providerId` (string): The UUID of the provider. You can get this from the `/api/providers` endpoint response.
   - `key` (string): The model key/identifier (e.g., `gpt-4o-mini`, `llama3.1:latest`). Use the `key` value from the provider's `chatModels` array, not the display name.
 
-- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
+- **`embeddingModel`** (object, required): Defines the embedding model for similarity-based searching. To get available providers and models, send a GET request to `http://localhost:3000/api/providers`.
 
   - `providerId` (string): The UUID of the embedding provider. You can get this from the `/api/providers` endpoint response.
   - `key` (string): The embedding model key (e.g., `text-embedding-3-large`, `nomic-embed-text`). Use the `key` value from the provider's `embeddingModels` array, not the display name.
 
-- **`focusMode`** (string, required): Specifies which focus mode to use. Available modes:
+- **`sources`** (array, required): Which search sources to enable. Available values:
 
-  - `webSearch`, `academicSearch`, `writingAssistant`, `wolframAlphaSearch`, `youtubeSearch`, `redditSearch`.
+  - `web`, `academic`, `discussions`.
 
 - **`optimizationMode`** (string, optional): Specifies the optimization mode to control the balance between performance and quality. Available modes:
 
   - `speed`: Prioritize speed and return the fastest answer.
   - `balanced`: Provide a balanced answer with good speed and reasonable quality.
+  - `quality`: Prioritize answer quality (may be slower).
 
 - **`query`** (string, required): The search query or question.
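Taken together, the parameters above translate into a request like the following sketch. The `providerId` values are placeholders; real ones come from a prior GET to `/api/providers`.

```ts
// Minimal sketch of a non-streaming /api/search call.
// providerId values are placeholders - read real ones from /api/providers.
const body = {
  chatModel: { providerId: '<uuid-from-/api/providers>', key: 'gpt-4o-mini' },
  embeddingModel: {
    providerId: '<uuid-from-/api/providers>',
    key: 'text-embedding-3-large',
  },
  optimizationMode: 'speed',
  sources: ['web'],
  query: 'What is Perplexica',
  history: [],
  stream: false,
};

const res = await fetch('http://localhost:3000/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body),
});

// Non-streaming responses carry the final answer plus its sources.
const { message, sources } = await res.json();
console.log(message, sources.length);
```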
@@ -132,14 +133,14 @@ The response from the API includes both the final message and the sources used t
   "message": "Perplexica is an innovative, open-source AI-powered search engine designed to enhance the way users search for information online. Here are some key features and characteristics of Perplexica:\n\n- **AI-Powered Technology**: It utilizes advanced machine learning algorithms to not only retrieve information but also to understand the context and intent behind user queries, providing more relevant results [1][5].\n\n- **Open-Source**: Being open-source, Perplexica offers flexibility and transparency, allowing users to explore its functionalities without the constraints of proprietary software [3][10].",
   "sources": [
     {
-      "pageContent": "Perplexica is an innovative, open-source AI-powered search engine designed to enhance the way users search for information online.",
+      "content": "Perplexica is an innovative, open-source AI-powered search engine designed to enhance the way users search for information online.",
       "metadata": {
         "title": "What is Perplexica, and how does it function as an AI-powered search ...",
         "url": "https://askai.glarity.app/search/What-is-Perplexica--and-how-does-it-function-as-an-AI-powered-search-engine"
       }
     },
     {
-      "pageContent": "Perplexica is an open-source AI-powered search tool that dives deep into the internet to find precise answers.",
+      "content": "Perplexica is an open-source AI-powered search tool that dives deep into the internet to find precise answers.",
       "metadata": {
         "title": "Sahar Mor's Post",
         "url": "https://www.linkedin.com/posts/sahar-mor_a-new-open-source-project-called-perplexica-activity-7204489745668694016-ncja"
@@ -158,7 +159,7 @@ Example of streamed response objects:
 
 ```
 {"type":"init","data":"Stream connected"}
-{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
+{"type":"sources","data":[{"content":"...","metadata":{"title":"...","url":"..."}},...]}
 {"type":"response","data":"Perplexica is an "}
 {"type":"response","data":"innovative, open-source "}
 {"type":"response","data":"AI-powered search engine..."}
@@ -174,9 +175,9 @@ Clients should process each line as a separate JSON object. The different messag
 
 ### Fields in the Response
 
-- **`message`** (string): The search result, generated based on the query and focus mode.
+- **`message`** (string): The search result, generated based on the query and enabled `sources`.
 - **`sources`** (array): A list of sources that were used to generate the search result. Each source includes:
-  - `pageContent`: A snippet of the relevant content from the source.
+  - `content`: A snippet of the relevant content from the source.
   - `metadata`: Metadata about the source, including:
     - `title`: The title of the webpage.
     - `url`: The URL of the webpage.
@@ -185,5 +186,5 @@ Clients should process each line as a separate JSON object. The different messag
 
 If an error occurs during the search process, the API will return an appropriate error message with an HTTP status code.
 
-- **400**: If the request is malformed or missing required fields (e.g., no focus mode or query).
+- **400**: If the request is malformed or missing required fields (e.g., no `sources` or `query`).
 - **500**: If an internal server error occurs during the search.
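A client for the streamed variant documented above might look like this sketch: each complete line of the response body is one JSON object of the documented types. Placeholders as before.

```ts
// Sketch: consume the newline-delimited JSON stream from /api/search.
const res = await fetch('http://localhost:3000/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    chatModel: { providerId: '<uuid>', key: 'gpt-4o-mini' },
    embeddingModel: { providerId: '<uuid>', key: 'text-embedding-3-large' },
    sources: ['web'],
    query: 'What is Perplexica',
    stream: true,
  }),
});

const reader = res.body!.getReader();
const decoder = new TextDecoder();
let buffered = '';

while (true) {
  const { value, done } = await reader.read();
  if (done) break;
  buffered += decoder.decode(value, { stream: true });

  const lines = buffered.split('\n');
  buffered = lines.pop() ?? ''; // keep a partial trailing line for the next chunk

  for (const line of lines.filter(Boolean)) {
    const event = JSON.parse(line);
    if (event.type === 'response') process.stdout.write(event.data);
    else if (event.type === 'sources') console.error('sources:', event.data.length);
    else if (event.type === 'done') console.error('[done]');
  }
}
```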
@@ -1,11 +1,38 @@
-# Perplexica's Architecture
+# Perplexica Architecture
 
-Perplexica's architecture consists of the following key components:
+Perplexica is a Next.js application that combines an AI chat experience with search.
 
-1. **User Interface**: A web-based interface that allows users to interact with Perplexica for searching images, videos, and much more.
-2. **Agent/Chains**: These components predict Perplexica's next actions, understand user queries, and decide whether a web search is necessary.
-3. **SearXNG**: A metadata search engine used by Perplexica to search the web for sources.
-4. **LLMs (Large Language Models)**: Utilized by agents and chains for tasks like understanding content, writing responses, and citing sources. Examples include Claude, GPTs, etc.
-5. **Embedding Models**: To improve the accuracy of search results, embedding models re-rank the results using similarity search algorithms such as cosine similarity and dot product distance.
+For a high level flow, see [WORKING.md](WORKING.md). For deeper implementation details, see [CONTRIBUTING.md](../../CONTRIBUTING.md).
 
-For a more detailed explanation of how these components work together, see [WORKING.md](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/WORKING.md).
+## Key components
+
+1. **User Interface**
+
+   - A web based UI that lets users chat, search, and view citations.
+
+2. **API Routes**
+
+   - `POST /api/chat` powers the chat UI.
+   - `POST /api/search` provides a programmatic search endpoint.
+   - `GET /api/providers` lists available providers and model keys.
+
+3. **Agents and Orchestration**
+
+   - The system classifies the question first.
+   - It can run research and widgets in parallel.
+   - It generates the final answer and includes citations.
+
+4. **Search Backend**
+
+   - A meta search backend is used to fetch relevant web results when research is enabled.
+
+5. **LLMs (Large Language Models)**
+
+   - Used for classification, writing answers, and producing citations.
+
+6. **Embedding Models**
+
+   - Used for semantic search over user uploaded files.
+
+7. **Storage**
+
+   - Chats and messages are stored so conversations can be reloaded.
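Of the routes listed above, `GET /api/providers` is the natural starting point for integrations, since the search docs say its `id` and model `key` fields feed the other endpoints. A hedged sketch (the exact response envelope is assumed):

```ts
// Sketch: discover providerId and model keys before calling the search API.
// The response shape here is assumed from the API docs (providers exposing
// `id`, `chatModels`, and `embeddingModels` arrays); verify against your build.
const res = await fetch('http://localhost:3000/api/providers');
const data = await res.json();

for (const p of data.providers ?? []) {
  console.log(
    p.id, // use as providerId
    p.chatModels?.map((m: { key: string }) => m.key), // use a key, not a display name
  );
}
```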
@@ -1,19 +1,72 @@
-# How does Perplexica work?
+# How Perplexica Works
 
-Curious about how Perplexica works? Don't worry, we'll cover it here. Before we begin, make sure you've read about the architecture of Perplexica to ensure you understand what it's made up of. Haven't read it? You can read it [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/architecture/README.md).
+This is a high level overview of how Perplexica answers a question.
 
-We'll understand how Perplexica works by taking an example of a scenario where a user asks: "How does an A.C. work?". We'll break down the process into steps to make it easier to understand. The steps are as follows:
+If you want a component level overview, see [README.md](README.md).
 
-1. The message is sent to the `/api/chat` route where it invokes the chain. The chain will depend on your focus mode. For this example, let's assume we use the "webSearch" focus mode.
-2. The chain is now invoked; first, the message is passed to another chain where it first predicts (using the chat history and the question) whether there is a need for sources and searching the web. If there is, it will generate a query (in accordance with the chat history) for searching the web that we'll take up later. If not, the chain will end there, and then the answer generator chain, also known as the response generator, will be started.
-3. The query returned by the first chain is passed to SearXNG to search the web for information.
-4. After the information is retrieved, it is based on keyword-based search. We then convert the information into embeddings and the query as well, then we perform a similarity search to find the most relevant sources to answer the query.
-5. After all this is done, the sources are passed to the response generator. This chain takes all the chat history, the query, and the sources. It generates a response that is streamed to the UI.
+If you want implementation details, see [CONTRIBUTING.md](../../CONTRIBUTING.md).
 
-## How are the answers cited?
+## What happens when you ask a question
 
-The LLMs are prompted to do so. We've prompted them so well that they cite the answers themselves, and using some UI magic, we display it to the user.
+When you send a message in the UI, the app calls `POST /api/chat`.
 
-## Image and Video Search
+At a high level, we do three things:
 
-Image and video searches are conducted in a similar manner. A query is always generated first, then we search the web for images and videos that match the query. These results are then returned to the user.
+1. Classify the question and decide what to do next.
+2. Run research and widgets in parallel.
+3. Write the final answer and include citations.
+
+## Classification
+
+Before searching or answering, we run a classification step.
+
+This step decides things like:
+
+- Whether we should do research for this question
+- Whether we should show any widgets
+- How to rewrite the question into a clearer standalone form
+
+## Widgets
+
+Widgets are small, structured helpers that can run alongside research.
+
+Examples include weather, stocks, and simple calculations.
+
+If a widget is relevant, we show it in the UI while the answer is still being generated.
+
+Widgets are helpful context for the answer, but they are not part of what the model should cite.
+
+## Research
+
+If research is needed, we gather information in the background while widgets can run.
+
+Depending on configuration, research may include web lookup and searching user uploaded files.
+
+## Answer generation
+
+Once we have enough context, the chat model generates the final response.
+
+You can control the tradeoff between speed and quality using `optimizationMode`:
+
+- `speed`
+- `balanced`
+- `quality`
+
+## How citations work
+
+We prompt the model to cite the references it used. The UI then renders those citations alongside the supporting links.
+
+## Search API
+
+If you are integrating Perplexica into another product, you can call `POST /api/search`.
+
+It returns:
+
+- `message`: the generated answer
+- `sources`: supporting references used for the answer
+
+You can also enable streaming by setting `stream: true`.
+
+## Image and video search
+
+Image and video search use separate endpoints (`POST /api/images` and `POST /api/videos`). We generate a focused query using the chat model, then fetch matching results from a search backend.
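The classify → parallel research/widgets → write flow described above can be pictured with a small sketch. Every function below is a mock stand-in for illustration, not Perplexica's real internals.

```ts
// Illustrative only - mock stand-ins, not Perplexica's actual pipeline code.
type Plan = { needsResearch: boolean; standaloneQuery: string; widgets: string[] };

const classify = async (q: string): Promise<Plan> => ({
  needsResearch: true,
  standaloneQuery: q,
  widgets: q.includes('weather') ? ['weather'] : [],
});
const doResearch = async (q: string): Promise<string[]> => [`notes about ${q}`];
const runWidgets = async (names: string[]) => names.map((n) => ({ widget: n }));
const writeAnswer = async (q: string, ctx: string[]) =>
  `Answer to "${q}" using ${ctx.length} research notes.`;

async function answer(question: string) {
  const plan = await classify(question); // step 1: classification

  // Step 2: research and widgets run in parallel.
  const [research, widgets] = await Promise.all([
    plan.needsResearch ? doResearch(plan.standaloneQuery) : Promise.resolve([]),
    runWidgets(plan.widgets),
  ]);

  console.log('widgets for the UI:', widgets); // shown to the user, never cited

  return writeAnswer(question, research); // step 3: write with citations
}

answer('How does an A.C. work?').then(console.log);
```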
@@ -10,7 +10,7 @@ Simply pull the latest image and restart your container:
 docker pull itzcrazykns1337/perplexica:latest
 docker stop perplexica
 docker rm perplexica
-docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:latest
+docker run -d -p 3000:3000 -v perplexica-data:/home/perplexica/data --name perplexica itzcrazykns1337/perplexica:latest
 ```
 
 For slim version:
 
@@ -19,7 +19,7 @@ For slim version:
 docker pull itzcrazykns1337/perplexica:slim-latest
 docker stop perplexica
 docker rm perplexica
-docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data -v perplexica-uploads:/home/perplexica/uploads --name perplexica itzcrazykns1337/perplexica:slim-latest
+docker run -d -p 3000:3000 -e SEARXNG_API_URL=http://your-searxng-url:8080 -v perplexica-data:/home/perplexica/data --name perplexica itzcrazykns1337/perplexica:slim-latest
 ```
 
 Once updated, go to http://localhost:3000 and verify the latest changes. Your settings are preserved automatically.
@@ -1,15 +1 @@
-PRAGMA foreign_keys=OFF;--> statement-breakpoint
-CREATE TABLE `__new_messages` (
-	`id` integer PRIMARY KEY NOT NULL,
-	`messageId` text NOT NULL,
-	`chatId` text NOT NULL,
-	`backendId` text NOT NULL,
-	`query` text NOT NULL,
-	`createdAt` text NOT NULL,
-	`responseBlocks` text DEFAULT '[]',
-	`status` text DEFAULT 'answering'
-);
---> statement-breakpoint
-DROP TABLE `messages`;--> statement-breakpoint
-ALTER TABLE `__new_messages` RENAME TO `messages`;--> statement-breakpoint
-PRAGMA foreign_keys=ON;
+/* do nothing */
@@ -28,8 +28,8 @@
       "notNull": true,
       "autoincrement": false
     },
-    "focusMode": {
-      "name": "focusMode",
+    "sources": {
+      "name": "sources",
       "type": "text",
       "primaryKey": false,
       "notNull": true,
next-env.d.ts (vendored, 2 lines changed)
@@ -1,6 +1,6 @@
 /// <reference types="next" />
 /// <reference types="next/image-types/global" />
-import "./.next/dev/types/routes.d.ts";
+import './.next/dev/types/routes.d.ts';
 
 // NOTE: This file should not be edited
 // see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
@@ -11,6 +11,13 @@ const nextConfig = {
     ],
   },
   serverExternalPackages: ['pdf-parse'],
+  outputFileTracingIncludes: {
+    '/api/**': [
+      './node_modules/@napi-rs/canvas/**',
+      './node_modules/@napi-rs/canvas-linux-x64-gnu/**',
+      './node_modules/@napi-rs/canvas-linux-x64-musl/**',
+    ],
+  },
   env: {
     NEXT_PUBLIC_VERSION: pkg.version,
   },
package.json (41 lines changed)
@@ -1,71 +1,65 @@
 {
-  "name": "perplexica-frontend",
-  "version": "1.11.2",
+  "name": "perplexica",
+  "version": "1.12.1",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
-    "dev": "next dev",
-    "build": "next build",
+    "dev": "next dev --webpack",
+    "build": "next build --webpack",
     "start": "next start",
     "lint": "next lint",
     "format:write": "prettier . --write"
   },
   "dependencies": {
+    "@google/genai": "^1.34.0",
     "@headlessui/react": "^2.2.0",
     "@headlessui/tailwindcss": "^0.2.2",
-    "@huggingface/transformers": "^3.7.5",
-    "@iarna/toml": "^2.2.5",
+    "@huggingface/transformers": "^3.8.1",
     "@icons-pack/react-simple-icons": "^12.3.0",
-    "@langchain/anthropic": "^1.0.1",
-    "@langchain/community": "^1.0.3",
-    "@langchain/core": "^1.0.5",
-    "@langchain/google-genai": "^1.0.1",
-    "@langchain/groq": "^1.0.1",
-    "@langchain/langgraph": "^1.0.1",
-    "@langchain/ollama": "^1.0.1",
-    "@langchain/openai": "^1.1.1",
-    "@langchain/textsplitters": "^1.0.0",
+    "@phosphor-icons/react": "^2.1.10",
     "@radix-ui/react-tooltip": "^1.2.8",
     "@tailwindcss/typography": "^0.5.12",
     "@toolsycc/json-repair": "^0.1.22",
     "axios": "^1.8.3",
     "better-sqlite3": "^11.9.1",
     "clsx": "^2.1.0",
     "drizzle-orm": "^0.40.1",
-    "framer-motion": "^12.23.25",
     "html-to-text": "^9.0.5",
-    "jspdf": "^3.0.1",
-    "langchain": "^1.0.4",
+    "js-tiktoken": "^1.0.21",
+    "jspdf": "^3.0.4",
     "lightweight-charts": "^5.0.9",
     "lucide-react": "^0.556.0",
     "mammoth": "^1.9.1",
     "markdown-to-jsx": "^7.7.2",
     "mathjs": "^15.1.0",
+    "motion": "^12.23.26",
     "next": "^16.0.7",
     "next-themes": "^0.3.0",
     "officeparser": "^5.2.2",
+    "ollama": "^0.6.3",
+    "openai": "^6.9.0",
     "partial-json": "^0.1.7",
-    "pdf-parse": "^1.1.1",
+    "pdf-parse": "^2.4.5",
     "react": "^18",
     "react-dom": "^18",
     "react-syntax-highlighter": "^16.1.0",
     "react-text-to-speech": "^0.14.5",
     "react-textarea-autosize": "^8.5.3",
     "rfc6902": "^5.1.2",
     "sonner": "^1.4.41",
     "tailwind-merge": "^2.2.2",
     "turndown": "^7.2.2",
     "winston": "^3.17.0",
     "yahoo-finance2": "^3.10.2",
     "yet-another-react-lightbox": "^3.17.2",
     "zod": "^4.1.12"
   },
   "devDependencies": {
     "@types/better-sqlite3": "^7.6.12",
     "@types/html-to-text": "^9.0.4",
     "@types/jspdf": "^2.0.0",
     "@types/node": "^24.8.1",
-    "@types/pdf-parse": "^1.1.4",
     "@types/react": "^18",
     "@types/react-dom": "^18",
     "@types/react-syntax-highlighter": "^15.5.13",
     "@types/turndown": "^5.0.6",
     "autoprefixer": "^10.0.1",
     "drizzle-kit": "^0.30.5",
@@ -75,5 +69,8 @@
     "prettier": "^3.2.5",
     "tailwindcss": "^3.3.0",
     "typescript": "^5.9.3"
   },
+  "optionalDependencies": {
+    "@napi-rs/canvas": "^0.1.87"
+  }
 }
@@ -1,10 +1,14 @@
 import crypto from 'crypto';
 import { z } from 'zod';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import SearchAgent from '@/lib/agents/search';
 import SessionManager from '@/lib/session';
 import { ChatTurnMessage } from '@/lib/types';
 import { SearchSources } from '@/lib/agents/search/types';
+import db from '@/lib/db';
+import { eq } from 'drizzle-orm';
+import { chats } from '@/lib/db/schema';
+import UploadManager from '@/lib/uploads/manager';
 
 export const runtime = 'nodejs';
 export const dynamic = 'force-dynamic';
@@ -32,7 +36,7 @@ const bodySchema = z.object({
   optimizationMode: z.enum(['speed', 'balanced', 'quality'], {
     message: 'Optimization mode must be one of: speed, balanced, quality',
   }),
-  focusMode: z.string().min(1, 'Focus mode is required'),
+  sources: z.array(z.string()).optional().default([]),
   history: z
     .array(z.tuple([z.string(), z.string()]))
     .optional()
@@ -43,7 +47,6 @@ const bodySchema = z.object({
   systemInstructions: z.string().nullable().optional().default(''),
 });
 
-type Message = z.infer<typeof messageSchema>;
 type Body = z.infer<typeof bodySchema>;
 
 const safeValidateBody = (data: unknown) => {
@@ -65,6 +68,38 @@ const safeValidateBody = (data: unknown) => {
   };
 };
 
+const ensureChatExists = async (input: {
+  id: string;
+  sources: SearchSources[];
+  query: string;
+  fileIds: string[];
+}) => {
+  try {
+    const exists = await db.query.chats
+      .findFirst({
+        where: eq(chats.id, input.id),
+      })
+      .execute();
+
+    if (!exists) {
+      await db.insert(chats).values({
+        id: input.id,
+        createdAt: new Date().toISOString(),
+        sources: input.sources,
+        title: input.query,
+        files: input.fileIds.map((id) => {
+          return {
+            fileId: id,
+            name: UploadManager.getFile(id)?.name || 'Uploaded File',
+          };
+        }),
+      });
+    }
+  } catch (err) {
+    console.error('Failed to check/save chat:', err);
+  }
+};
+
 export const POST = async (req: Request) => {
   try {
     const reqBody = (await req.json()) as Body;
@@ -121,95 +156,86 @@ export const POST = async (req: Request) => {
     const writer = responseStream.writable.getWriter();
     const encoder = new TextEncoder();
 
-    let receivedMessage = '';
-
-    session.addListener('data', (data: any) => {
-      if (data.type === 'response') {
-        writer.write(
-          encoder.encode(
-            JSON.stringify({
-              type: 'message',
-              data: data.data,
-            }) + '\n',
-          ),
-        );
-        receivedMessage += data.data;
-      } else if (data.type === 'sources') {
-        writer.write(
-          encoder.encode(
-            JSON.stringify({
-              type: 'sources',
-              data: data.data,
-            }) + '\n',
-          ),
-        );
-      } else if (data.type === 'block') {
-        writer.write(
-          encoder.encode(
-            JSON.stringify({
-              type: 'block',
-              block: data.block,
-            }) + '\n',
-          ),
-        );
-      } else if (data.type === 'updateBlock') {
-        writer.write(
-          encoder.encode(
-            JSON.stringify({
-              type: 'updateBlock',
-              blockId: data.blockId,
-              patch: data.patch,
-            }) + '\n',
-          ),
-        );
-      } else if (data.type === 'researchComplete') {
-        writer.write(
-          encoder.encode(
-            JSON.stringify({
-              type: 'researchComplete',
-            }) + '\n',
-          ),
-        );
-      }
-    });
-
-    session.addListener('end', () => {
-      writer.write(
-        encoder.encode(
-          JSON.stringify({
-            type: 'messageEnd',
-          }) + '\n',
-        ),
-      );
-      writer.close();
-      session.removeAllListeners();
-    });
-
-    session.addListener('error', (data: any) => {
-      writer.write(
-        encoder.encode(
-          JSON.stringify({
-            type: 'error',
-            data: data.data,
-          }) + '\n',
-        ),
-      );
-      writer.close();
-      session.removeAllListeners();
-    });
+    const disconnect = session.subscribe((event: string, data: any) => {
+      if (event === 'data') {
+        if (data.type === 'block') {
+          writer.write(
+            encoder.encode(
+              JSON.stringify({
+                type: 'block',
+                block: data.block,
+              }) + '\n',
+            ),
+          );
+        } else if (data.type === 'updateBlock') {
+          writer.write(
+            encoder.encode(
+              JSON.stringify({
+                type: 'updateBlock',
+                blockId: data.blockId,
+                patch: data.patch,
+              }) + '\n',
+            ),
+          );
+        } else if (data.type === 'researchComplete') {
+          writer.write(
+            encoder.encode(
+              JSON.stringify({
+                type: 'researchComplete',
+              }) + '\n',
+            ),
+          );
+        }
+      } else if (event === 'end') {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'messageEnd',
+            }) + '\n',
+          ),
+        );
+        writer.close();
+        session.removeAllListeners();
+      } else if (event === 'error') {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'error',
+              data: data.data,
+            }) + '\n',
+          ),
+        );
+        writer.close();
+        session.removeAllListeners();
+      }
+    });
 
     agent.searchAsync(session, {
       chatHistory: history,
       followUp: message.content,
       chatId: body.message.chatId,
       messageId: body.message.messageId,
       config: {
         llm,
         embedding: embedding,
-        sources: ['web'],
+        sources: body.sources as SearchSources[],
         mode: body.optimizationMode,
         fileIds: body.files,
         systemInstructions: body.systemInstructions || 'None',
       },
     });
 
-    /* handleHistorySave(message, humanMessageId, body.focusMode, body.files); */
+    ensureChatExists({
+      id: body.message.chatId,
+      sources: body.sources as SearchSources[],
+      fileIds: body.files,
+      query: body.message.content,
+    });
+
+    req.signal.addEventListener('abort', () => {
+      disconnect();
+      writer.close();
+    });
 
     return new Response(responseStream.readable, {
       headers: {
@@ -21,7 +21,10 @@ export const POST = async (req: Request) => {
 
   const images = await searchImages(
     {
-      chatHistory: body.chatHistory,
+      chatHistory: body.chatHistory.map(([role, content]) => ({
+        role: role === 'human' ? 'user' : 'assistant',
+        content,
+      })),
       query: body.query,
     },
    llm,
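The mapping above shows that `/api/images` accepts chat history as `['human' | 'assistant', content]` tuples. A hedged request sketch (the `chatModel` field is an assumption, mirroring the search API's shape; the response envelope is also assumed):

```ts
// Sketch of a POST /api/images call. chatHistory tuples and query come from
// the route diff above; chatModel and the `images` envelope are assumptions.
const res = await fetch('http://localhost:3000/api/images', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'Air conditioner refrigeration cycle diagram',
    chatHistory: [
      ['human', 'How does an A.C. work?'],
      ['assistant', 'An air conditioner moves heat from inside to outside.'],
    ],
    chatModel: { providerId: '<uuid>', key: 'gpt-4o-mini' }, // assumed field
  }),
});
const { images } = await res.json(); // assumed envelope
console.log(images?.length);
```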
src/app/api/reconnect/[id]/route.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
import SessionManager from '@/lib/session';

export const POST = async (
  req: Request,
  { params }: { params: Promise<{ id: string }> },
) => {
  try {
    const { id } = await params;

    const session = SessionManager.getSession(id);

    if (!session) {
      return Response.json({ message: 'Session not found' }, { status: 404 });
    }

    const responseStream = new TransformStream();
    const writer = responseStream.writable.getWriter();
    const encoder = new TextEncoder();

    const disconnect = session.subscribe((event, data) => {
      if (event === 'data') {
        if (data.type === 'block') {
          writer.write(
            encoder.encode(
              JSON.stringify({
                type: 'block',
                block: data.block,
              }) + '\n',
            ),
          );
        } else if (data.type === 'updateBlock') {
          writer.write(
            encoder.encode(
              JSON.stringify({
                type: 'updateBlock',
                blockId: data.blockId,
                patch: data.patch,
              }) + '\n',
            ),
          );
        } else if (data.type === 'researchComplete') {
          writer.write(
            encoder.encode(
              JSON.stringify({
                type: 'researchComplete',
              }) + '\n',
            ),
          );
        }
      } else if (event === 'end') {
        writer.write(
          encoder.encode(
            JSON.stringify({
              type: 'messageEnd',
            }) + '\n',
          ),
        );
        writer.close();
        disconnect();
      } else if (event === 'error') {
        writer.write(
          encoder.encode(
            JSON.stringify({
              type: 'error',
              data: data.data,
            }) + '\n',
          ),
        );
        writer.close();
        disconnect();
      }
    });

    req.signal.addEventListener('abort', () => {
      disconnect();
      writer.close();
    });

    return new Response(responseStream.readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        Connection: 'keep-alive',
        'Cache-Control': 'no-cache, no-transform',
      },
    });
  } catch (err) {
    console.error('Error in reconnecting to session stream: ', err);
    return Response.json(
      { message: 'An error has occurred.' },
      { status: 500 },
    );
  }
};
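A client for the new route above might look like this sketch, mirroring the handler: POST to `/api/reconnect/:id` and read newline-delimited JSON events until `messageEnd` or `error`.

```ts
// Sketch: reconnect to an in-flight session and replay its event stream.
async function reconnect(sessionId: string) {
  const res = await fetch(`http://localhost:3000/api/reconnect/${sessionId}`, {
    method: 'POST',
  });
  if (res.status === 404) throw new Error('Session not found');

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffered = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffered += decoder.decode(value, { stream: true });
    const lines = buffered.split('\n');
    buffered = lines.pop() ?? ''; // hold back any partial line

    for (const line of lines.filter(Boolean)) {
      const event = JSON.parse(line);
      if (event.type === 'block') console.log('block', event.block);
      else if (event.type === 'updateBlock')
        console.log('patch for', event.blockId, event.patch); // rfc6902-style patch
      else if (event.type === 'researchComplete') console.log('research done');
      else if (event.type === 'messageEnd') return;
      else if (event.type === 'error') throw new Error(String(event.data));
    }
  }
}
```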
@@ -1,12 +1,13 @@
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import SessionManager from '@/lib/session';
-import SearchAgent from '@/lib/agents/search';
 import { ChatTurnMessage } from '@/lib/types';
 import { SearchSources } from '@/lib/agents/search/types';
+import APISearchAgent from '@/lib/agents/search/api';
 
 interface ChatRequestBody {
-  optimizationMode: 'speed' | 'balanced';
-  focusMode: string;
+  optimizationMode: 'speed' | 'balanced' | 'quality';
+  sources: SearchSources[];
   chatModel: ModelWithProvider;
   embeddingModel: ModelWithProvider;
   query: string;
@@ -19,15 +20,15 @@ export const POST = async (req: Request) => {
   try {
     const body: ChatRequestBody = await req.json();
 
-    if (!body.focusMode || !body.query) {
+    if (!body.sources || !body.query) {
       return Response.json(
-        { message: 'Missing focus mode or query' },
+        { message: 'Missing sources or query' },
         { status: 400 },
       );
     }
 
     body.history = body.history || [];
-    body.optimizationMode = body.optimizationMode || 'balanced';
+    body.optimizationMode = body.optimizationMode || 'speed';
     body.stream = body.stream || false;
 
     const registry = new ModelRegistry();
@@ -48,17 +49,21 @@ export const POST = async (req: Request) => {
 
     const session = SessionManager.createSession();
 
-    const agent = new SearchAgent();
+    const agent = new APISearchAgent();
 
     agent.searchAsync(session, {
       chatHistory: history,
       config: {
         embedding: embeddings,
         llm: llm,
-        sources: ['web', 'discussions', 'academic'],
-        mode: 'balanced',
+        sources: body.sources,
+        mode: body.optimizationMode,
         fileIds: [],
         systemInstructions: body.systemInstructions || '',
       },
       followUp: body.query,
+      chatId: crypto.randomUUID(),
+      messageId: crypto.randomUUID(),
     });
 
     if (!body.stream) {
@@ -70,36 +75,37 @@ export const POST = async (req: Request) => {
           let message = '';
           let sources: any[] = [];
 
-          session.addListener('data', (data: string) => {
-            try {
-              const parsedData = JSON.parse(data);
-              if (parsedData.type === 'response') {
-                message += parsedData.data;
-              } else if (parsedData.type === 'sources') {
-                sources = parsedData.data;
-              }
-            } catch (error) {
-              reject(
-                Response.json(
-                  { message: 'Error parsing data' },
-                  { status: 500 },
-                ),
-              );
-            }
-          });
-
-          session.addListener('end', () => {
-            resolve(Response.json({ message, sources }, { status: 200 }));
-          });
-
-          session.addListener('error', (error: any) => {
-            reject(
-              Response.json(
-                { message: 'Search error', error },
-                { status: 500 },
-              ),
-            );
-          });
+          session.subscribe((event: string, data: Record<string, any>) => {
+            if (event === 'data') {
+              try {
+                if (data.type === 'response') {
+                  message += data.data;
+                } else if (data.type === 'searchResults') {
+                  sources = data.data;
+                }
+              } catch (error) {
+                reject(
+                  Response.json(
+                    { message: 'Error parsing data' },
+                    { status: 500 },
+                  ),
+                );
+              }
+            }
+
+            if (event === 'end') {
+              resolve(Response.json({ message, sources }, { status: 200 }));
+            }
+
+            if (event === 'error') {
+              reject(
+                Response.json(
+                  { message: 'Search error', error: data },
+                  { status: 500 },
+                ),
+              );
+            }
+          });
         },
       );
     }
@@ -130,54 +136,54 @@ export const POST = async (req: Request) => {
         } catch (error) {}
       });
 
-      session.addListener('data', (data: string) => {
-        if (signal.aborted) return;
-
-        try {
-          const parsedData = JSON.parse(data);
-
-          if (parsedData.type === 'response') {
-            controller.enqueue(
-              encoder.encode(
-                JSON.stringify({
-                  type: 'response',
-                  data: parsedData.data,
-                }) + '\n',
-              ),
-            );
-          } else if (parsedData.type === 'sources') {
-            sources = parsedData.data;
-            controller.enqueue(
-              encoder.encode(
-                JSON.stringify({
-                  type: 'sources',
-                  data: sources,
-                }) + '\n',
-              ),
-            );
-          }
-        } catch (error) {
-          controller.error(error);
-        }
-      });
-
-      session.addListener('end', () => {
-        if (signal.aborted) return;
-
-        controller.enqueue(
-          encoder.encode(
-            JSON.stringify({
-              type: 'done',
-            }) + '\n',
-          ),
-        );
-        controller.close();
-      });
-
-      session.addListener('error', (error: any) => {
-        if (signal.aborted) return;
-
-        controller.error(error);
-      });
+      session.subscribe((event: string, data: Record<string, any>) => {
+        if (event === 'data') {
+          if (signal.aborted) return;
+
+          try {
+            if (data.type === 'response') {
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'response',
+                    data: data.data,
+                  }) + '\n',
+                ),
+              );
+            } else if (data.type === 'searchResults') {
+              sources = data.data;
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'sources',
+                    data: sources,
+                  }) + '\n',
+                ),
+              );
+            }
+          } catch (error) {
+            controller.error(error);
+          }
+        }
+
+        if (event === 'end') {
+          if (signal.aborted) return;
+
+          controller.enqueue(
+            encoder.encode(
+              JSON.stringify({
+                type: 'done',
+              }) + '\n',
+            ),
+          );
+          controller.close();
+        }
+
+        if (event === 'error') {
+          if (signal.aborted) return;
+
+          controller.error(data);
+        }
+      });
     },
     cancel() {
@@ -1,7 +1,6 @@
 import generateSuggestions from '@/lib/agents/suggestions';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
-import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 
 interface SuggestionsGenerationBody {
   chatHistory: any[];
@@ -21,7 +20,10 @@ export const POST = async (req: Request) => {
 
   const suggestions = await generateSuggestions(
     {
-      chatHistory: body.chatHistory,
+      chatHistory: body.chatHistory.map(([role, content]) => ({
+        role: role === 'human' ? 'user' : 'assistant',
+        content,
+      })),
     },
     llm,
   );
@@ -1,40 +1,16 @@
 import { NextResponse } from 'next/server';
-import fs from 'fs';
-import path from 'path';
-import crypto from 'crypto';
-import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
-import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
-import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
-import { Document } from '@langchain/core/documents';
 import ModelRegistry from '@/lib/models/registry';
-import { Chunk } from '@/lib/types';
+import UploadManager from '@/lib/uploads/manager';
 
 interface FileRes {
   fileName: string;
   fileExtension: string;
   fileId: string;
 }
 
-const uploadDir = path.join(process.cwd(), 'uploads');
-
-if (!fs.existsSync(uploadDir)) {
-  fs.mkdirSync(uploadDir, { recursive: true });
-}
-
-const splitter = new RecursiveCharacterTextSplitter({
-  chunkSize: 500,
-  chunkOverlap: 100,
-});
-
 export async function POST(req: Request) {
   try {
     const formData = await req.formData();
 
     const files = formData.getAll('files') as File[];
-    const embedding_model = formData.get('embedding_model_key') as string;
-    const embedding_model_provider = formData.get('embedding_model_provider_id') as string;
+    const embeddingModel = formData.get('embedding_model_key') as string;
+    const embeddingModelProvider = formData.get('embedding_model_provider_id') as string;
 
-    if (!embedding_model || !embedding_model_provider) {
+    if (!embeddingModel || !embeddingModelProvider) {
       return NextResponse.json(
         { message: 'Missing embedding model or provider' },
         { status: 400 },
@@ -43,81 +19,13 @@ export async function POST(req: Request) {
 
     const registry = new ModelRegistry();
 
-    const model = await registry.loadEmbeddingModel(embedding_model_provider, embedding_model);
+    const model = await registry.loadEmbeddingModel(embeddingModelProvider, embeddingModel);
 
-    const processedFiles: FileRes[] = [];
-
-    await Promise.all(
-      files.map(async (file: any) => {
-        const fileExtension = file.name.split('.').pop();
-        if (!['pdf', 'docx', 'txt'].includes(fileExtension!)) {
-          return NextResponse.json(
-            { message: 'File type not supported' },
-            { status: 400 },
-          );
-        }
-
-        const uniqueFileName = `${crypto.randomBytes(16).toString('hex')}.${fileExtension}`;
-        const filePath = path.join(uploadDir, uniqueFileName);
-
-        const buffer = Buffer.from(await file.arrayBuffer());
-        fs.writeFileSync(filePath, new Uint8Array(buffer));
-
-        let docs: any[] = [];
-        if (fileExtension === 'pdf') {
-          const loader = new PDFLoader(filePath);
-          docs = await loader.load();
-        } else if (fileExtension === 'docx') {
-          const loader = new DocxLoader(filePath);
-          docs = await loader.load();
-        } else if (fileExtension === 'txt') {
-          const text = fs.readFileSync(filePath, 'utf-8');
-          docs = [
-            new Document({ pageContent: text, metadata: { title: file.name } }),
-          ];
-        }
-
-        const splitted = await splitter.splitDocuments(docs);
-
-        const extractedDataPath = filePath.replace(/\.\w+$/, '-extracted.json');
-        fs.writeFileSync(
-          extractedDataPath,
-          JSON.stringify({
-            title: file.name,
-            contents: splitted.map((doc) => doc.pageContent),
-          }),
-        );
-
-        const chunks: Chunk[] = splitted.map((doc) => {
-          return {
-            content: doc.pageContent,
-            metadata: doc.metadata,
-          }
-        });
-
-        const embeddings = await model.embedChunks(
-          chunks
-        );
-
-        const embeddingsDataPath = filePath.replace(
-          /\.\w+$/,
-          '-embeddings.json',
-        );
-        fs.writeFileSync(
-          embeddingsDataPath,
-          JSON.stringify({
-            title: file.name,
-            embeddings,
-          }),
-        );
-
-        processedFiles.push({
-          fileName: file.name,
-          fileExtension: fileExtension,
-          fileId: uniqueFileName.replace(/\.\w+$/, ''),
-        });
-      }),
-    );
+    const uploadManager = new UploadManager({
+      embeddingModel: model,
+    })
+
+    const processedFiles = await uploadManager.processFiles(files);
 
     return NextResponse.json({
       files: processedFiles,
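The form fields read by the handler above suggest a client upload sketch like the following. The endpoint path `/api/uploads` is an assumption inferred from the route's location; the field names come straight from the diff.

```ts
// Sketch: upload files for semantic search using the form fields the route
// above reads. The /api/uploads path is an assumption; verify in src/app/api.
async function uploadFiles(files: File[], providerId: string, modelKey: string) {
  const form = new FormData();
  files.forEach((f) => form.append('files', f));
  form.append('embedding_model_key', modelKey);
  form.append('embedding_model_provider_id', providerId);

  const res = await fetch('http://localhost:3000/api/uploads', {
    method: 'POST',
    body: form, // browser/fetch sets the multipart boundary automatically
  });

  const { files: processed } = await res.json();
  return processed; // FileRes[]: { fileName, fileExtension, fileId }
}
```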
@@ -21,7 +21,10 @@ export const POST = async (req: Request) => {
 
   const videos = await handleVideoSearch(
     {
-      chatHistory: body.chatHistory,
+      chatHistory: body.chatHistory.map(([role, content]) => ({
+        role: role === 'human' ? 'user' : 'assistant',
+        content,
+      })),
       query: body.query,
     },
     llm,
@@ -1,10 +1,5 @@
 'use client';
 
 import ChatWindow from '@/components/ChatWindow';
-import React from 'react';
 
-const Page = () => {
-  return <ChatWindow />;
-};
-
-export default Page;
+export default ChatWindow;
@@ -34,7 +34,7 @@ export default function RootLayout({
 
   return (
     <html className="h-full" lang="en" suppressHydrationWarning>
-      <body className={cn('h-full', montserrat.className)}>
+      <body className={cn('h-full antialiased', montserrat.className)}>
         <ThemeProvider>
           {setupComplete ? (
             <ChatProvider>
@@ -1,8 +1,8 @@
 'use client';
 
 import DeleteChat from '@/components/DeleteChat';
-import { cn, formatTimeDifference } from '@/lib/utils';
-import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
+import { formatTimeDifference } from '@/lib/utils';
+import { BookOpenText, ClockIcon, FileText, Globe2Icon } from 'lucide-react';
 import Link from 'next/link';
 import { useEffect, useState } from 'react';
@@ -10,7 +10,8 @@ export interface Chat {
   id: string;
   title: string;
   createdAt: string;
-  focusMode: string;
+  sources: string[];
+  files: { fileId: string; name: string }[];
 }
 
 const Page = () => {
@@ -37,74 +38,137 @@ const Page = () => {
     fetchChats();
   }, []);
 
-  return loading ? (
-    <div className="flex flex-row items-center justify-center min-h-screen">
-      <svg
-        aria-hidden="true"
-        className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
-        viewBox="0 0 100 101"
-        fill="none"
-        xmlns="http://www.w3.org/2000/svg"
-      >
-        <path
-          d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
-          fill="currentColor"
-        />
-        <path
-          d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
-          fill="currentFill"
-        />
-      </svg>
-    </div>
-  ) : (
+  return (
     <div>
-      <div className="flex flex-col pt-4">
-        <div className="flex items-center">
-          <BookOpenText />
-          <h1 className="text-3xl font-medium p-2">Library</h1>
-        </div>
-        <hr className="border-t border-[#2B2C2C] my-4 w-full" />
-      </div>
-      {chats.length === 0 && (
-        <div className="flex flex-row items-center justify-center min-h-screen">
-          <p className="text-black/70 dark:text-white/70 text-sm">
-            No chats found.
-          </p>
-        </div>
-      )}
-      {chats.length > 0 && (
-        <div className="flex flex-col pb-20 lg:pb-2">
-          {chats.map((chat, i) => (
-            <div
-              className={cn(
-                'flex flex-col space-y-4 py-6',
-                i !== chats.length - 1
-                  ? 'border-b border-white-200 dark:border-dark-200'
-                  : '',
-              )}
-              key={i}
-            >
-              <Link
-                href={`/c/${chat.id}`}
-                className="text-black dark:text-white lg:text-xl font-medium truncate transition duration-200 hover:text-[#24A0ED] dark:hover:text-[#24A0ED] cursor-pointer"
-              >
-                {chat.title}
-              </Link>
-              <div className="flex flex-row items-center justify-between w-full">
-                <div className="flex flex-row items-center space-x-1 lg:space-x-1.5 text-black/70 dark:text-white/70">
-                  <ClockIcon size={15} />
-                  <p className="text-xs">
-                    {formatTimeDifference(new Date(), chat.createdAt)} Ago
-                  </p>
-                </div>
-                <DeleteChat
-                  chatId={chat.id}
-                  chats={chats}
-                  setChats={setChats}
-                />
-              </div>
-            </div>
-          ))}
-        </div>
+      <div className="flex flex-col pt-10 border-b border-light-200/20 dark:border-dark-200/20 pb-6 px-2">
+        <div className="flex flex-col lg:flex-row lg:items-end lg:justify-between gap-3">
+          <div className="flex items-center justify-center">
+            <BookOpenText size={45} className="mb-2.5" />
+            <div className="flex flex-col">
+              <h1
+                className="text-5xl font-normal p-2 pb-0"
+                style={{ fontFamily: 'PP Editorial, serif' }}
+              >
+                Library
+              </h1>
+              <div className="px-2 text-sm text-black/60 dark:text-white/60 text-center lg:text-left">
+                Past chats, sources, and uploads.
+              </div>
+            </div>
+          </div>
+
+          <div className="flex items-center justify-center lg:justify-end gap-2 text-xs text-black/60 dark:text-white/60">
+            <span className="inline-flex items-center gap-1 rounded-full border border-black/20 dark:border-white/20 px-2 py-0.5">
+              <BookOpenText size={14} />
+              {loading
+                ? 'Loading…'
+                : `${chats.length} ${chats.length === 1 ? 'chat' : 'chats'}`}
+            </span>
+          </div>
+        </div>
+      </div>
+
+      {loading ? (
+        <div className="flex flex-row items-center justify-center min-h-[60vh]">
+          <svg
+            aria-hidden="true"
+            className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
+            viewBox="0 0 100 101"
+            fill="none"
+            xmlns="http://www.w3.org/2000/svg"
+          >
+            <path
+              d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
+              fill="currentColor"
+            />
+            <path
+              d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
+              fill="currentFill"
+            />
+          </svg>
+        </div>
+      ) : chats.length === 0 ? (
+        <div className="flex flex-col items-center justify-center min-h-[70vh] px-2 text-center">
+          <div className="flex items-center justify-center w-12 h-12 rounded-2xl border border-light-200 dark:border-dark-200 bg-light-secondary dark:bg-dark-secondary">
+            <BookOpenText className="text-black/70 dark:text-white/70" />
+          </div>
+          <p className="mt-2 text-black/70 dark:text-white/70 text-sm">
+            No chats found.
+          </p>
+          <p className="mt-1 text-black/70 dark:text-white/70 text-sm">
||||
<Link href="/" className="text-sky-400">
|
||||
Start a new chat
|
||||
</Link>{' '}
|
||||
to see it listed here.
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="pt-6 pb-28 px-2">
|
||||
<div className="rounded-2xl border border-light-200 dark:border-dark-200 overflow-hidden bg-light-primary dark:bg-dark-primary">
|
||||
{chats.map((chat, index) => {
|
||||
const sourcesLabel =
|
||||
chat.sources.length === 0
|
||||
? null
|
||||
: chat.sources.length <= 2
|
||||
? chat.sources
|
||||
.map((s) => s.charAt(0).toUpperCase() + s.slice(1))
|
||||
.join(', ')
|
||||
: `${chat.sources
|
||||
.slice(0, 2)
|
||||
.map((s) => s.charAt(0).toUpperCase() + s.slice(1))
|
||||
.join(', ')} + ${chat.sources.length - 2}`;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={chat.id}
|
||||
className={
|
||||
'group flex flex-col gap-2 p-4 hover:bg-light-secondary dark:hover:bg-dark-secondary transition-colors duration-200 ' +
|
||||
(index !== chats.length - 1
|
||||
? 'border-b border-light-200 dark:border-dark-200'
|
||||
: '')
|
||||
}
|
||||
>
|
||||
<div className="flex items-start justify-between gap-3">
|
||||
<Link
|
||||
href={`/c/${chat.id}`}
|
||||
className="flex-1 text-black dark:text-white text-base lg:text-lg font-medium leading-snug line-clamp-2 group-hover:text-[#24A0ED] transition duration-200"
|
||||
title={chat.title}
|
||||
>
|
||||
{chat.title}
|
||||
</Link>
|
||||
<div className="pt-0.5 shrink-0">
|
||||
<DeleteChat
|
||||
chatId={chat.id}
|
||||
chats={chats}
|
||||
setChats={setChats}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-wrap items-center gap-2 text-black/70 dark:text-white/70">
|
||||
<span className="inline-flex items-center gap-1 text-xs">
|
||||
<ClockIcon size={14} />
|
||||
{formatTimeDifference(new Date(), chat.createdAt)} Ago
|
||||
</span>
|
||||
|
||||
{sourcesLabel && (
|
||||
<span className="inline-flex items-center gap-1 text-xs border border-black/20 dark:border-white/20 rounded-full px-2 py-0.5">
|
||||
<Globe2Icon size={14} />
|
||||
{sourcesLabel}
|
||||
</span>
|
||||
)}
|
||||
{chat.files.length > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-xs border border-black/20 dark:border-white/20 rounded-full px-2 py-0.5">
|
||||
<FileText size={14} />
|
||||
{chat.files.length}{' '}
|
||||
{chat.files.length === 1 ? 'file' : 'files'}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
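The Library page above condenses each chat's sources into a short label: up to two names, capitalized, plus a "+ n" overflow count. Pulled out of the JSX, the same formatting reads as this standalone helper (the function name is illustrative, not from the codebase):

    // Formats ['web', 'academic', 'discussions'] as 'Web, Academic + 1'.
    const formatSourcesLabel = (sources: string[]): string | null => {
      if (sources.length === 0) return null;
      const cap = (s: string) => s.charAt(0).toUpperCase() + s.slice(1);
      const shown = sources.slice(0, 2).map(cap).join(', ');
      return sources.length <= 2 ? shown : `${shown} + ${sources.length - 2}`;
    };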
@@ -16,9 +16,12 @@ import { useChat } from '@/lib/hooks/useChat';
const getStepIcon = (step: ResearchBlockSubStep) => {
if (step.type === 'reasoning') {
return <Brain className="w-4 h-4" />;
} else if (step.type === 'searching') {
} else if (step.type === 'searching' || step.type === 'upload_searching') {
return <Search className="w-4 h-4" />;
} else if (step.type === 'search_results') {
} else if (
step.type === 'search_results' ||
step.type === 'upload_search_results'
) {
return <FileText className="w-4 h-4" />;
} else if (step.type === 'reading') {
return <BookSearch className="w-4 h-4" />;
@@ -39,6 +42,10 @@ const getStepTitle = (
return `Found ${step.reading.length} ${step.reading.length === 1 ? 'result' : 'results'}`;
} else if (step.type === 'reading') {
return `Reading ${step.reading.length} ${step.reading.length === 1 ? 'source' : 'sources'}`;
} else if (step.type === 'upload_searching') {
return 'Scanning your uploaded documents';
} else if (step.type === 'upload_search_results') {
return `Reading ${step.results.length} ${step.results.length === 1 ? 'document' : 'documents'}`;
}

return 'Processing';
@@ -47,17 +54,21 @@ const getStepTitle = (
const AssistantSteps = ({
block,
status,
isLast,
}: {
block: ResearchBlock;
status: 'answering' | 'completed' | 'error';
isLast: boolean;
}) => {
const [isExpanded, setIsExpanded] = useState(true);
const [isExpanded, setIsExpanded] = useState(
isLast && status === 'answering' ? true : false,
);
const { researchEnded, loading } = useChat();

useEffect(() => {
if (researchEnded) {
if (researchEnded && isLast) {
setIsExpanded(false);
} else if (status === 'answering') {
} else if (status === 'answering' && isLast) {
setIsExpanded(true);
}
}, [researchEnded, status]);
@@ -175,8 +186,10 @@ const AssistantSteps = ({
: '';

return (
<span
<a
key={idx}
href={url}
target="_blank"
className="inline-flex items-center gap-1.5 px-2 py-0.5 rounded-md text-xs font-medium bg-light-100 dark:bg-dark-100 text-black/70 dark:text-white/70 border border-light-200 dark:border-dark-200"
>
{faviconUrl && (
@@ -190,7 +203,50 @@ const AssistantSteps = ({
/>
)}
<span className="line-clamp-1">{title}</span>
</span>
</a>
);
})}
</div>
)}

{step.type === 'upload_searching' &&
step.queries.length > 0 && (
<div className="flex flex-wrap gap-1.5 mt-1.5">
{step.queries.map((query, idx) => (
<span
key={idx}
className="inline-flex items-center px-2 py-0.5 rounded-md text-xs font-medium bg-light-100 dark:bg-dark-100 text-black/70 dark:text-white/70 border border-light-200 dark:border-dark-200"
>
{query}
</span>
))}
</div>
)}

{step.type === 'upload_search_results' &&
step.results.length > 0 && (
<div className="mt-1.5 grid gap-3 lg:grid-cols-3">
{step.results.slice(0, 4).map((result, idx) => {
const title =
(result.metadata &&
(result.metadata.title ||
result.metadata.fileName)) ||
'Untitled document';

return (
<div
key={idx}
className="flex flex-row space-x-3 rounded-lg border border-light-200 dark:border-dark-200 bg-light-100 dark:bg-dark-100 p-2 cursor-pointer"
>
<div className="mt-0.5 h-10 w-10 rounded-md bg-cyan-100 text-cyan-800 dark:bg-sky-500 dark:text-cyan-50 flex items-center justify-center">
<FileText className="w-5 h-5" />
</div>
<div className="flex flex-col justify-center">
<p className="text-[13px] text-black dark:text-white line-clamp-1">
{title}
</p>
</div>
</div>
);
})}
</div>
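The AssistantSteps changes above scope the automatic expand/collapse behavior to the last message only: its step list starts expanded while that message is answering and folds once researchEnded fires, while earlier messages now start collapsed and only toggle manually. Condensed, the state logic amounts to (a sketch restating the diff with the same names):

    const [isExpanded, setIsExpanded] = useState(isLast && status === 'answering');

    useEffect(() => {
      if (researchEnded && isLast) setIsExpanded(false); // research done: fold the live block
      else if (status === 'answering' && isLast) setIsExpanded(true); // keep the active block open
    }, [researchEnded, status]);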
@@ -59,7 +59,7 @@ const Chat = () => {
}, [messages]);

return (
<div className="flex flex-col space-y-6 pt-8 pb-28 sm:mx-4 md:mx-8">
<div className="flex flex-col space-y-6 pt-8 pb-44 lg:pb-28 sm:mx-4 md:mx-8">
{sections.map((section, i) => {
const isLast = i === sections.length - 1;

@@ -80,7 +80,10 @@ const Chat = () => {
{loading && !messageAppeared && <MessageBoxLoading />}
<div ref={messageEnd} className="h-0" />
{dividerWidth > 0 && (
<div className="bottom-6 fixed z-40" style={{ width: dividerWidth }}>
<div
className="fixed z-40 bottom-24 lg:bottom-6"
style={{ width: dividerWidth }}
>
<div
className="pointer-events-none absolute -bottom-6 left-0 right-0 h-[calc(100%+24px+24px)] dark:hidden"
style={{

@@ -6,7 +6,8 @@ import EmptyChat from './EmptyChat';
import NextError from 'next/error';
import { useChat } from '@/lib/hooks/useChat';
import SettingsButtonMobile from './Settings/SettingsButtonMobile';
import { Block, Chunk } from '@/lib/types';
import { Block } from '@/lib/types';
import Loader from './ui/Loader';

export interface BaseMessage {
chatId: string;
@@ -21,35 +22,6 @@ export interface Message extends BaseMessage {
status: 'answering' | 'completed' | 'error';
}

export interface UserMessage extends BaseMessage {
role: 'user';
content: string;
}

export interface AssistantMessage extends BaseMessage {
role: 'assistant';
content: string;
suggestions?: string[];
}

export interface SourceMessage extends BaseMessage {
role: 'source';
sources: Chunk[];
}

export interface SuggestionMessage extends BaseMessage {
role: 'suggestion';
suggestions: string[];
}

export type LegacyMessage =
| AssistantMessage
| UserMessage
| SourceMessage
| SuggestionMessage;

export type ChatTurn = UserMessage | AssistantMessage;

export interface File {
fileName: string;
fileExtension: string;
@@ -62,7 +34,8 @@ export interface Widget {
}

const ChatWindow = () => {
const { hasError, notFound, messages } = useChat();
const { hasError, notFound, messages, isReady } = useChat();

if (hasError) {
return (
<div className="relative">
@@ -78,18 +51,24 @@ const ChatWindow = () => {
);
}

return notFound ? (
<NextError statusCode={404} />
return isReady ? (
notFound ? (
<NextError statusCode={404} />
) : (
<div>
{messages.length > 0 ? (
<>
<Navbar />
<Chat />
</>
) : (
<EmptyChat />
)}
</div>
)
) : (
<div>
{messages.length > 0 ? (
<>
<Navbar />
<Chat />
</>
) : (
<EmptyChat />
)}
<div className="flex items-center justify-center min-h-screen w-full">
<Loader />
</div>
);
};
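ChatWindow now gates on the hook's isReady flag: until the chat state has loaded it renders a centered Loader, and only then decides between the 404 page and the chat UI. Unwound from the nested ternary, the control flow is (an equivalent sketch; the view variables stand in for the JSX above):

    if (hasError) return errorView; // unchanged early return
    if (!isReady) return fullScreenLoader; // new: wait for chat state to hydrate
    if (notFound) return <NextError statusCode={404} />;
    return messages.length > 0 ? chatWithNavbar : <EmptyChat />;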
@@ -1,7 +1,7 @@
import { ArrowRight } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import Focus from './MessageInputActions/Focus';
import Sources from './MessageInputActions/Sources';
import Optimization from './MessageInputActions/Optimization';
import Attach from './MessageInputActions/Attach';
import { useChat } from '@/lib/hooks/useChat';
@@ -68,8 +68,8 @@ const EmptyChatMessageInput = () => {
<Optimization />
<div className="flex flex-row items-center space-x-2">
<div className="flex flex-row items-center space-x-1">
<Sources />
<ModelSelector />
<Focus />
<Attach />
</div>
<button

@@ -2,6 +2,7 @@ import { Check, ClipboardList } from 'lucide-react';
import { Message } from '../ChatWindow';
import { useState } from 'react';
import { Section } from '@/lib/hooks/useChat';
import { SourceBlock } from '@/lib/types';

const Copy = ({
section,
@@ -15,15 +16,25 @@ const Copy = ({
return (
<button
onClick={() => {
const sources = section.message.responseBlocks.filter(
(b) => b.type === 'source' && b.data.length > 0,
) as SourceBlock[];

const contentToCopy = `${initialMessage}${
section?.message.responseBlocks.filter((b) => b.type === 'source')
?.length > 0 &&
`\n\nCitations:\n${section.message.responseBlocks
.filter((b) => b.type === 'source')
?.map((source: any, i: any) => `[${i + 1}] ${source.metadata.url}`)
.join(`\n`)}`
sources.length > 0
? `\n\nCitations:\n${sources
.map((source) => source.data)
.flat()
.map(
(s, i) =>
`[${i + 1}] ${s.metadata.url.startsWith('file_id://') ? s.metadata.fileName || 'Uploaded File' : s.metadata.url}`,
)
.join(`\n`)}`
: ''
}`;

navigator.clipboard.writeText(contentToCopy);

setCopied(true);
setTimeout(() => setCopied(false), 1000);
}}
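The rewritten Copy handler narrows the response blocks to typed SourceBlock entries up front, flattens their data chunks, and cites uploaded files by file name instead of their raw file_id:// URL. Per line, the citation format reduces to (a sketch; the Chunk metadata fields follow the usage above, not a checked API):

    const citationLine = (s: Chunk, i: number): string =>
      `[${i + 1}] ${
        s.metadata.url.startsWith('file_id://')
          ? s.metadata.fileName || 'Uploaded File' // local upload: show its name
          : s.metadata.url // web source: keep the URL
      }`;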
@@ -12,7 +12,7 @@ import {
Plus,
CornerDownRight,
} from 'lucide-react';
import Markdown, { MarkdownToJSX } from 'markdown-to-jsx';
import Markdown, { MarkdownToJSX, RuleType } from 'markdown-to-jsx';
import Copy from './MessageActions/Copy';
import Rewrite from './MessageActions/Rewrite';
import MessageSources from './MessageSources';
@@ -21,10 +21,11 @@ import SearchVideos from './SearchVideos';
import { useSpeech } from 'react-text-to-speech';
import ThinkBox from './ThinkBox';
import { useChat, Section } from '@/lib/hooks/useChat';
import Citation from './Citation';
import Citation from './MessageRenderer/Citation';
import AssistantSteps from './AssistantSteps';
import { ResearchBlock } from '@/lib/types';
import Renderer from './Widgets/Renderer';
import CodeBlock from './MessageRenderer/CodeBlock';

const ThinkTagProcessor = ({
children,
@@ -49,7 +50,14 @@ const MessageBox = ({
dividerRef?: MutableRefObject<HTMLDivElement | null>;
isLast: boolean;
}) => {
const { loading, sendMessage, rewrite, messages, researchEnded } = useChat();
const {
loading,
sendMessage,
rewrite,
messages,
researchEnded,
chatHistory,
} = useChat();

const parsedMessage = section.parsedTextBlocks.join('\n\n');
const speechMessage = section.speechMessage || '';
@@ -67,6 +75,21 @@ const MessageBox = ({
const { speechStatus, start, stop } = useSpeech({ text: speechMessage });

const markdownOverrides: MarkdownToJSX.Options = {
renderRule(next, node, renderChildren, state) {
if (node.type === RuleType.codeInline) {
return `\`${node.text}\``;
}

if (node.type === RuleType.codeBlock) {
return (
<CodeBlock key={state.key} language={node.lang || ''}>
{node.text}
</CodeBlock>
);
}

return next();
},
overrides: {
think: {
component: ThinkTagProcessor,
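markdown-to-jsx's renderRule option lets a single callback intercept each AST node before the default renderer runs; in the hunk above, inline code is re-emitted as backtick-wrapped text and fenced blocks are routed to the custom CodeBlock component, with next() as the fallthrough. A minimal standalone use of the same hook (assuming a markdown-to-jsx version that exports renderRule support and RuleType, v7.3 or later):

    import Markdown, { RuleType } from 'markdown-to-jsx';

    const Example = () => (
      <Markdown
        options={{
          renderRule(next, node, renderChildren, state) {
            if (node.type === RuleType.codeBlock) {
              // Swap in any renderer here; node.lang carries the fence's language tag.
              return <pre key={state.key}>{node.text}</pre>;
            }
            return next(); // defer everything else to the default rules
          },
        }}
      >
        {'```ts\nconsole.log(1);\n```'}
      </Markdown>
    );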
@@ -115,12 +138,11 @@ const MessageBox = ({
<AssistantSteps
block={researchBlock}
status={section.message.status}
isLast={isLast}
/>
</div>
))}

{section.widgets.length > 0 && <Renderer widgets={section.widgets} />}

{isLast &&
loading &&
!researchEnded &&
@@ -135,6 +157,8 @@ const MessageBox = ({
</div>
)}

{section.widgets.length > 0 && <Renderer widgets={section.widgets} />}

<div className="flex flex-col space-y-2">
{sources.length > 0 && (
<div className="flex flex-row items-center space-x-2">
@@ -218,10 +242,10 @@ const MessageBox = ({
className="group w-full py-4 text-left transition-colors duration-200"
>
<div className="flex items-center justify-between gap-3">
<div className="flex flex-row space-x-3 items-center ">
<div className="flex flex-row space-x-3 items-center">
<CornerDownRight
size={17}
className="group-hover:text-sky-400 transition-colors duration-200"
size={15}
className="group-hover:text-sky-400 transition-colors duration-200 flex-shrink-0"
/>
<p className="text-sm text-black/70 dark:text-white/70 group-hover:text-sky-400 transition-colors duration-200 leading-relaxed">
{suggestion}
@@ -248,11 +272,11 @@ const MessageBox = ({
<div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
<SearchImages
query={section.message.query}
chatHistory={messages}
chatHistory={chatHistory}
messageId={section.message.messageId}
/>
<SearchVideos
chatHistory={messages}
chatHistory={chatHistory}
query={section.message.query}
messageId={section.message.messageId}
/>
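SearchImages and SearchVideos now receive chatHistory from useChat instead of the raw messages array. After the message-model refactor in ChatWindow above, messages carries block-based entries, so chatHistory plausibly exposes plain user/assistant turns in the ChatTurn shape that the ChatWindow hunk removes (presumably relocated rather than dropped). If so, the media panels' prop shape is roughly:

    // Assumed from the ChatTurn type shown in this diff, not a checked signature.
    interface SearchMediaProps {
      query: string;
      messageId: string;
      chatHistory: ChatTurn[]; // user/assistant turns only
    }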
@@ -2,7 +2,6 @@ import { cn } from '@/lib/utils';
import { ArrowUp } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import CopilotToggle from './MessageInputActions/Copilot';
import AttachSmall from './MessageInputActions/AttachSmall';
import { useChat } from '@/lib/hooks/useChat';

@@ -62,7 +61,7 @@ const MessageInput = () => {
}
}}
className={cn(
'relative bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 shadow-sm shadow-light-200/10 dark:shadow-black/20 transition-all duration-200 focus-within:border-light-300 dark:focus-within:border-dark-300',
'relative bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-visible border border-light-200 dark:border-dark-200 shadow-sm shadow-light-200/10 dark:shadow-black/20 transition-all duration-200 focus-within:border-light-300 dark:focus-within:border-dark-300',
mode === 'multi' ? 'flex-col rounded-2xl' : 'flex-row rounded-full',
)}
>
@@ -78,11 +77,16 @@ const MessageInput = () => {
placeholder="Ask a follow-up"
/>
{mode === 'single' && (
<div className="flex flex-row items-center space-x-4">
<CopilotToggle
copilotEnabled={copilotEnabled}
setCopilotEnabled={setCopilotEnabled}
/>
<button
disabled={message.trim().length === 0 || loading}
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
>
<ArrowUp className="bg-background" size={17} />
</button>
)}
{mode === 'multi' && (
<div className="flex flex-row items-center justify-between w-full pt-2">
<AttachSmall />
<button
disabled={message.trim().length === 0 || loading}
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
@@ -91,23 +95,6 @@ const MessageInput = () => {
</button>
</div>
)}
{mode === 'multi' && (
<div className="flex flex-row items-center justify-between w-full pt-2">
<AttachSmall />
<div className="flex flex-row items-center space-x-4">
<CopilotToggle
copilotEnabled={copilotEnabled}
setCopilotEnabled={setCopilotEnabled}
/>
<button
disabled={message.trim().length === 0 || loading}
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
>
<ArrowUp className="bg-background" size={17} />
</button>
</div>
</div>
)}
</form>
);
};

@@ -16,6 +16,8 @@ import {
} from 'lucide-react';
import { Fragment, useRef, useState } from 'react';
import { useChat } from '@/lib/hooks/useChat';
import { AnimatePresence } from 'motion/react';
import { motion } from 'framer-motion';

const Attach = () => {
const { files, setFiles, setFileIds, fileIds } = useChat();
@@ -53,86 +55,95 @@ const Attach = () => {

return loading ? (
<div className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none text-black/50 dark:text-white/50 transition duration-200">
<LoaderCircle size={16} className="text-sky-400 animate-spin" />
<LoaderCircle size={16} className="text-sky-500 animate-spin" />
</div>
) : files.length > 0 ? (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<File size={16} className="text-sky-400" />
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-150"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-64 md:w-[350px] right-0">
<div className="bg-light-primary dark:bg-dark-primary border rounded-md border-light-200 dark:border-dark-200 w-full max-h-[200px] md:max-h-none overflow-y-auto flex flex-col">
<div className="flex flex-row items-center justify-between px-3 py-2">
<h4 className="text-black dark:text-white font-medium text-sm">
Attached files
</h4>
<div className="flex flex-row items-center space-x-4">
<button
type="button"
onClick={() => fileInputRef.current.click()}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
{({ open }) => (
<>
<PopoverButton
type="button"
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<File size={16} className="text-sky-500" />
</PopoverButton>
<AnimatePresence>
{open && (
<PopoverPanel
className="absolute z-10 w-64 md:w-[350px] right-0"
static
>
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.1, ease: 'easeOut' }}
className="origin-top-right bg-light-primary dark:bg-dark-primary border rounded-md border-light-200 dark:border-dark-200 w-full max-h-[200px] md:max-h-none overflow-y-auto flex flex-col"
>
<input
type="file"
onChange={handleChange}
ref={fileInputRef}
accept=".pdf,.docx,.txt"
multiple
hidden
/>
<Plus size={16} />
<p className="text-xs">Add</p>
</button>
<button
onClick={() => {
setFiles([]);
setFileIds([]);
}}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
>
<Trash size={14} />
<p className="text-xs">Clear</p>
</button>
</div>
</div>
<div className="h-[0.5px] mx-2 bg-white/10" />
<div className="flex flex-col items-center">
{files.map((file, i) => (
<div
key={i}
className="flex flex-row items-center justify-start w-full space-x-3 p-3"
>
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
<File
size={16}
className="text-black/70 dark:text-white/70"
/>
<div className="flex flex-row items-center justify-between px-3 py-2">
<h4 className="text-black/70 dark:text-white/70 text-sm">
Attached files
</h4>
<div className="flex flex-row items-center space-x-4">
<button
type="button"
onClick={() => fileInputRef.current.click()}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
>
<input
type="file"
onChange={handleChange}
ref={fileInputRef}
accept=".pdf,.docx,.txt"
multiple
hidden
/>
<Plus size={16} />
<p className="text-xs">Add</p>
</button>
<button
onClick={() => {
setFiles([]);
setFileIds([]);
}}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
>
<Trash size={13} />
<p className="text-xs">Clear</p>
</button>
</div>
</div>
<p className="text-black/70 dark:text-white/70 text-sm">
{file.fileName.length > 25
? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
'...' +
file.fileExtension
: file.fileName}
</p>
</div>
))}
</div>
</div>
</PopoverPanel>
</Transition>
<div className="h-[0.5px] mx-2 bg-white/10" />
<div className="flex flex-col items-center">
{files.map((file, i) => (
<div
key={i}
className="flex flex-row items-center justify-start w-full space-x-3 p-3"
>
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-9 h-9 rounded-md">
<File
size={16}
className="text-black/70 dark:text-white/70"
/>
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{file.fileName.length > 25
? file.fileName
.replace(/\.\w+$/, '')
.substring(0, 25) +
'...' +
file.fileExtension
: file.fileName}
</p>
</div>
))}
</div>
</motion.div>
</PopoverPanel>
)}
</AnimatePresence>
</>
)}
</Popover>
) : (
<button
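Attach here, and AttachSmall, ModelSelector, Optimization, and Sources below, all migrate from Headless UI's Transition wrapper to the same animation pattern: the render-prop form of Popover exposes open, the panel is marked static so Headless UI stops unmounting it on close, and AnimatePresence drives the exit animation of an inner motion.div. Stripped to its skeleton (a sketch of the pattern, not a drop-in component):

    import { Popover, PopoverButton, PopoverPanel } from '@headlessui/react';
    import { AnimatePresence, motion } from 'motion/react';

    const AnimatedPopover = () => (
      <Popover className="relative">
        {({ open }) => (
          <>
            <PopoverButton>Open</PopoverButton>
            <AnimatePresence>
              {open && (
                // `static` keeps the panel mounted so the exit animation can play.
                <PopoverPanel static className="absolute z-10">
                  <motion.div
                    initial={{ opacity: 0, scale: 0.9 }}
                    animate={{ opacity: 1, scale: 1 }}
                    exit={{ opacity: 0, scale: 0.9 }}
                    transition={{ duration: 0.1, ease: 'easeOut' }}
                  >
                    Panel content
                  </motion.div>
                </PopoverPanel>
              )}
            </AnimatePresence>
          </>
        )}
      </Popover>
    );

One detail worth noting: the Attach components import AnimatePresence from motion/react but motion from framer-motion; both resolve to the same animation library, though a single import source would be more consistent.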
@@ -1,21 +1,14 @@
import { cn } from '@/lib/utils';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import {
CopyPlus,
File,
LoaderCircle,
Paperclip,
Plus,
Trash,
} from 'lucide-react';
import { File, LoaderCircle, Paperclip, Plus, Trash } from 'lucide-react';
import { Fragment, useRef, useState } from 'react';
import { File as FileType } from '../ChatWindow';
import { useChat } from '@/lib/hooks/useChat';
import { AnimatePresence } from 'motion/react';
import { motion } from 'framer-motion';

const AttachSmall = () => {
const { files, setFiles, setFileIds, fileIds } = useChat();
@@ -53,86 +46,95 @@ const AttachSmall = () => {

return loading ? (
<div className="flex flex-row items-center justify-between space-x-1 p-1 ">
<LoaderCircle size={20} className="text-sky-400 animate-spin" />
<LoaderCircle size={20} className="text-sky-500 animate-spin" />
</div>
) : files.length > 0 ? (
<Popover className="max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="flex flex-row items-center justify-between space-x-1 p-1 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<File size={20} className="text-sky-400" />
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-150"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-64 md:w-[350px] bottom-14 -ml-3">
<div className="bg-light-primary dark:bg-dark-primary border rounded-md border-light-200 dark:border-dark-200 w-full max-h-[200px] md:max-h-none overflow-y-auto flex flex-col">
<div className="flex flex-row items-center justify-between px-3 py-2">
<h4 className="text-black dark:text-white font-medium text-sm">
Attached files
</h4>
<div className="flex flex-row items-center space-x-4">
<button
type="button"
onClick={() => fileInputRef.current.click()}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
{({ open }) => (
<>
<PopoverButton
type="button"
className="flex flex-row items-center justify-between space-x-1 p-1 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<File size={20} className="text-sky-500" />
</PopoverButton>
<AnimatePresence>
{open && (
<PopoverPanel
className="absolute z-10 w-64 md:w-[350px] bottom-14"
static
>
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.1, ease: 'easeOut' }}
className="origin-bottom-left bg-light-primary dark:bg-dark-primary border rounded-md border-light-200 dark:border-dark-200 w-full max-h-[200px] md:max-h-none overflow-y-auto flex flex-col"
>
<input
type="file"
onChange={handleChange}
ref={fileInputRef}
accept=".pdf,.docx,.txt"
multiple
hidden
/>
<Plus size={18} />
<p className="text-xs">Add</p>
</button>
<button
onClick={() => {
setFiles([]);
setFileIds([]);
}}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
>
<Trash size={14} />
<p className="text-xs">Clear</p>
</button>
</div>
</div>
<div className="h-[0.5px] mx-2 bg-white/10" />
<div className="flex flex-col items-center">
{files.map((file, i) => (
<div
key={i}
className="flex flex-row items-center justify-start w-full space-x-3 p-3"
>
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
<File
size={16}
className="text-black/70 dark:text-white/70"
/>
<div className="flex flex-row items-center justify-between px-3 py-2">
<h4 className="text-black/70 dark:text-white/70 font-medium text-sm">
Attached files
</h4>
<div className="flex flex-row items-center space-x-4">
<button
type="button"
onClick={() => fileInputRef.current.click()}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
>
<input
type="file"
onChange={handleChange}
ref={fileInputRef}
accept=".pdf,.docx,.txt"
multiple
hidden
/>
<Plus size={16} />
<p className="text-xs">Add</p>
</button>
<button
onClick={() => {
setFiles([]);
setFileIds([]);
}}
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
>
<Trash size={13} />
<p className="text-xs">Clear</p>
</button>
</div>
</div>
<p className="text-black/70 dark:text-white/70 text-sm">
{file.fileName.length > 25
? file.fileName.replace(/\.\w+$/, '').substring(0, 25) +
'...' +
file.fileExtension
: file.fileName}
</p>
</div>
))}
</div>
</div>
</PopoverPanel>
</Transition>
<div className="h-[0.5px] mx-2 bg-white/10" />
<div className="flex flex-col items-center">
{files.map((file, i) => (
<div
key={i}
className="flex flex-row items-center justify-start w-full space-x-3 p-3"
>
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-9 h-9 rounded-md">
<File
size={16}
className="text-black/70 dark:text-white/70"
/>
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{file.fileName.length > 25
? file.fileName
.replace(/\.\w+$/, '')
.substring(0, 25) +
'...' +
file.fileExtension
: file.fileName}
</p>
</div>
))}
</div>
</motion.div>
</PopoverPanel>
)}
</AnimatePresence>
</>
)}
</Popover>
) : (
<button

@@ -2,15 +2,11 @@

import { Cpu, Loader2, Search } from 'lucide-react';
import { cn } from '@/lib/utils';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import { Fragment, useEffect, useMemo, useState } from 'react';
import { Popover, PopoverButton, PopoverPanel } from '@headlessui/react';
import { useEffect, useMemo, useState } from 'react';
import { MinimalProvider } from '@/lib/models/types';
import { useChat } from '@/lib/hooks/useChat';
import { AnimatePresence, motion } from 'motion/react';

const ModelSelector = () => {
const [providers, setProviders] = useState<MinimalProvider[]>([]);
@@ -79,119 +75,127 @@ const ModelSelector = () => {

return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<Cpu size={16} className="text-sky-500" />
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-100"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-100"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-[230px] sm:w-[270px] md:w-[300px] -right-4">
<div className="bg-light-primary dark:bg-dark-primary max-h-[300px] sm:max-w-none border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
<div className="p-4 border-b border-light-200 dark:border-dark-200">
<div className="relative">
<Search
size={16}
className="absolute left-3 top-1/2 -translate-y-1/2 text-black/40 dark:text-white/40"
/>
<input
type="text"
placeholder="Search models..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg placeholder:text-sm text-sm text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
/>
</div>
</div>
{({ open }) => (
<>
<PopoverButton
type="button"
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<Cpu size={16} className="text-sky-500" />
</PopoverButton>
<AnimatePresence>
{open && (
<PopoverPanel
className="absolute z-10 w-[230px] sm:w-[270px] md:w-[300px] right-0"
static
>
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.1, ease: 'easeOut' }}
className="origin-top-right bg-light-primary dark:bg-dark-primary max-h-[300px] sm:max-w-none border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden"
>
<div className="p-2 border-b border-light-200 dark:border-dark-200">
<div className="relative">
<Search
size={16}
className="absolute left-3 top-1/2 -translate-y-1/2 text-black/40 dark:text-white/40"
/>
<input
type="text"
placeholder="Search models..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="w-full pl-8 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg placeholder:text-xs placeholder:-translate-y-[1.5px] text-xs text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none border border-transparent transition duration-200"
/>
</div>
</div>

<div className="max-h-[320px] overflow-y-auto">
{isLoading ? (
<div className="flex items-center justify-center py-16">
<Loader2
className="animate-spin text-black/40 dark:text-white/40"
size={24}
/>
</div>
) : filteredProviders.length === 0 ? (
<div className="text-center py-16 px-4 text-black/60 dark:text-white/60 text-sm">
{searchQuery
? 'No models found'
: 'No chat models configured'}
</div>
) : (
<div className="flex flex-col">
{filteredProviders.map((provider, providerIndex) => (
<div key={provider.id}>
<div className="px-4 py-2.5 sticky top-0 bg-light-primary dark:bg-dark-primary border-b border-light-200/50 dark:border-dark-200/50">
<p className="text-xs text-black/50 dark:text-white/50 uppercase tracking-wider">
{provider.name}
</p>
<div className="max-h-[320px] overflow-y-auto">
{isLoading ? (
<div className="flex items-center justify-center py-16">
<Loader2
className="animate-spin text-black/40 dark:text-white/40"
size={24}
/>
</div>

<div className="flex flex-col px-2 py-2 space-y-0.5">
{provider.chatModels.map((model) => (
<button
key={model.key}
onClick={() =>
handleModelSelect(provider.id, model.key)
}
type="button"
className={cn(
'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
chatModelProvider?.providerId === provider.id &&
chatModelProvider?.key === model.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)}
>
<div className="flex items-center space-x-2.5 min-w-0 flex-1">
<Cpu
size={15}
className={cn(
'shrink-0',
chatModelProvider?.providerId ===
provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500'
: 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
)}
/>
<p
className={cn(
'text-sm truncate',
chatModelProvider?.providerId ===
provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500 font-medium'
: 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
)}
>
{model.name}
) : filteredProviders.length === 0 ? (
<div className="text-center py-16 px-4 text-black/60 dark:text-white/60 text-sm">
{searchQuery
? 'No models found'
: 'No chat models configured'}
</div>
) : (
<div className="flex flex-col">
{filteredProviders.map((provider, providerIndex) => (
<div key={provider.id}>
<div className="px-4 py-2.5 sticky top-0 bg-light-primary dark:bg-dark-primary border-b border-light-200/50 dark:border-dark-200/50">
<p className="text-xs text-black/50 dark:text-white/50 uppercase tracking-wider">
{provider.name}
</p>
</div>
</button>

<div className="flex flex-col px-2 py-2 space-y-0.5">
{provider.chatModels.map((model) => (
<button
key={model.key}
onClick={() =>
handleModelSelect(provider.id, model.key)
}
type="button"
className={cn(
'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
chatModelProvider?.providerId ===
provider.id &&
chatModelProvider?.key === model.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)}
>
<div className="flex items-center space-x-2.5 min-w-0 flex-1">
<Cpu
size={15}
className={cn(
'shrink-0',
chatModelProvider?.providerId ===
provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500'
: 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
)}
/>
<p
className={cn(
'text-xs truncate',
chatModelProvider?.providerId ===
provider.id &&
chatModelProvider?.key === model.key
? 'text-sky-500 font-medium'
: 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
)}
>
{model.name}
</p>
</div>
</button>
))}
</div>

{providerIndex < filteredProviders.length - 1 && (
<div className="h-px bg-light-200 dark:bg-dark-200" />
)}
</div>
))}
</div>

{providerIndex < filteredProviders.length - 1 && (
<div className="h-px bg-light-200 dark:bg-dark-200" />
)}
</div>
))}
</div>
)}
</div>
</div>
</PopoverPanel>
</Transition>
)}
</div>
</motion.div>
</PopoverPanel>
)}
</AnimatePresence>
</>
)}
</Popover>
);
};
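ModelSelector keeps the raw provider list and a searchQuery in state and renders a derived filteredProviders; given the useMemo import retained in this diff, the derivation is presumably memoized along these lines (a sketch; only id, name, and chatModels of MinimalProvider appear above, and the matching rule is assumed):

    const filteredProviders = useMemo(
      () =>
        providers
          .map((provider) => ({
            ...provider,
            chatModels: provider.chatModels.filter((model) =>
              model.name.toLowerCase().includes(searchQuery.toLowerCase()),
            ),
          }))
          .filter((provider) => provider.chatModels.length > 0), // hide empty groups
      [providers, searchQuery],
    );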
@@ -1,43 +0,0 @@
import { cn } from '@/lib/utils';
import { Switch } from '@headlessui/react';

const CopilotToggle = ({
copilotEnabled,
setCopilotEnabled,
}: {
copilotEnabled: boolean;
setCopilotEnabled: (enabled: boolean) => void;
}) => {
return (
<div className="group flex flex-row items-center space-x-1 active:scale-95 duration-200 transition cursor-pointer">
<Switch
checked={copilotEnabled}
onChange={setCopilotEnabled}
className="bg-light-secondary dark:bg-dark-secondary border border-light-200/70 dark:border-dark-200 relative inline-flex h-5 w-10 sm:h-6 sm:w-11 items-center rounded-full"
>
<span className="sr-only">Copilot</span>
<span
className={cn(
copilotEnabled
? 'translate-x-6 bg-[#24A0ED]'
: 'translate-x-1 bg-black/50 dark:bg-white/50',
'inline-block h-3 w-3 sm:h-4 sm:w-4 transform rounded-full transition-all duration-200',
)}
/>
</Switch>
<p
onClick={() => setCopilotEnabled(!copilotEnabled)}
className={cn(
'text-xs font-medium transition-colors duration-150 ease-in-out',
copilotEnabled
? 'text-[#24A0ED]'
: 'text-black/50 dark:text-white/50 group-hover:text-black dark:group-hover:text-white',
)}
>
Copilot
</p>
</div>
);
};

export default CopilotToggle;
@@ -1,123 +0,0 @@
import {
BadgePercent,
ChevronDown,
Globe,
Pencil,
ScanEye,
SwatchBook,
} from 'lucide-react';
import { cn } from '@/lib/utils';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
import { Fragment } from 'react';
import { useChat } from '@/lib/hooks/useChat';

const focusModes = [
{
key: 'webSearch',
title: 'All',
description: 'Searches across all of the internet',
icon: <Globe size={16} />,
},
{
key: 'academicSearch',
title: 'Academic',
description: 'Search in published academic papers',
icon: <SwatchBook size={16} />,
},
{
key: 'writingAssistant',
title: 'Writing',
description: 'Chat without searching the web',
icon: <Pencil size={16} />,
},
{
key: 'wolframAlphaSearch',
title: 'Wolfram Alpha',
description: 'Computational knowledge engine',
icon: <BadgePercent size={16} />,
},
{
key: 'youtubeSearch',
title: 'Youtube',
description: 'Search and watch videos',
icon: <SiYoutube className="h-[16px] w-auto mr-0.5" />,
},
{
key: 'redditSearch',
title: 'Reddit',
description: 'Search for discussions and opinions',
icon: <SiReddit className="h-[16px] w-auto mr-0.5" />,
},
];

const Focus = () => {
const { focusMode, setFocusMode } = useChat();

return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
{focusMode !== 'webSearch' ? (
<div className="flex flex-row items-center space-x-1">
{focusModes.find((mode) => mode.key === focusMode)?.icon}
</div>
) : (
<div className="flex flex-row items-center space-x-1">
<Globe size={16} />
</div>
)}
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-150"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-64 md:w-[500px] -right-4">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{focusModes.map((mode, i) => (
<PopoverButton
onClick={() => setFocusMode(mode.key)}
key={i}
className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition focus:outline-none',
focusMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)}
>
<div
className={cn(
'flex flex-row items-center space-x-1',
focusMode === mode.key
? 'text-[#24A0ED]'
: 'text-black dark:text-white',
)}
>
{mode.icon}
<p className="text-sm font-medium">{mode.title}</p>
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{mode.description}
</p>
</PopoverButton>
))}
</div>
</PopoverPanel>
</Transition>
</Popover>
);
};

export default Focus;
@@ -8,6 +8,7 @@ import {
} from '@headlessui/react';
import { Fragment } from 'react';
import { useChat } from '@/lib/hooks/useChat';
import { AnimatePresence, motion } from 'motion/react';

const OptimizationModes = [
{
@@ -60,40 +61,50 @@ const Optimization = () => {
/>
</div>
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-150"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-64 md:w-[250px] left-0">
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{OptimizationModes.map((mode, i) => (
<PopoverButton
onClick={() => setOptimizationMode(mode.key)}
key={i}
className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition focus:outline-none',
optimizationMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)}
>
<div className="flex flex-row items-center space-x-1 text-black dark:text-white">
{mode.icon}
<p className="text-sm font-medium">{mode.title}</p>
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{mode.description}
</p>
</PopoverButton>
))}
</div>
</PopoverPanel>
</Transition>
<AnimatePresence>
{open && (
<PopoverPanel
className="absolute z-10 w-64 md:w-[250px] left-0"
static
>
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.1, ease: 'easeOut' }}
className="origin-top-left flex flex-col space-y-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-2 max-h-[200px] md:max-h-none overflow-y-auto"
>
{OptimizationModes.map((mode, i) => (
<PopoverButton
onClick={() => setOptimizationMode(mode.key)}
key={i}
className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition focus:outline-none',
optimizationMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
)}
>
<div className="flex flex-row justify-between w-full text-black dark:text-white">
<div className="flex flex-row space-x-1">
{mode.icon}
<p className="text-xs font-medium">{mode.title}</p>
</div>
{mode.key === 'quality' && (
<span className="bg-sky-500/70 dark:bg-sky-500/40 border border-sky-600 px-1 rounded-full text-[10px] text-white">
Beta
</span>
)}
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{mode.description}
</p>
</PopoverButton>
))}
</motion.div>
</PopoverPanel>
)}
</AnimatePresence>
</>
)}
</Popover>
93
src/components/MessageInputActions/Sources.tsx
Normal file
@@ -0,0 +1,93 @@
import { useChat } from '@/lib/hooks/useChat';
import {
Popover,
PopoverButton,
PopoverPanel,
Switch,
} from '@headlessui/react';
import {
GlobeIcon,
GraduationCapIcon,
NetworkIcon,
} from '@phosphor-icons/react';
import { AnimatePresence, motion } from 'motion/react';

const sourcesList = [
{
name: 'Web',
key: 'web',
icon: <GlobeIcon className="h-[16px] w-auto" />,
},
{
name: 'Academic',
key: 'academic',
icon: <GraduationCapIcon className="h-[16px] w-auto" />,
},
{
name: 'Social',
key: 'discussions',
icon: <NetworkIcon className="h-[16px] w-auto" />,
},
];

const Sources = () => {
const { sources, setSources } = useChat();

return (
<Popover className="relative">
{({ open }) => (
<>
<PopoverButton className="flex items-center justify-center active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white">
<GlobeIcon className="h-[18px] w-auto" />
</PopoverButton>
<AnimatePresence>
{open && (
<PopoverPanel
static
className="absolute z-10 w-64 md:w-[225px] right-0"
>
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.1, ease: 'easeOut' }}
className="origin-top-right flex flex-col bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-1 max-h-[200px] md:max-h-none overflow-y-auto shadow-lg"
>
{sourcesList.map((source, i) => (
<div
key={i}
className="flex flex-row justify-between hover:bg-light-100 hover:dark:bg-dark-100 rounded-md py-3 px-2 cursor-pointer"
onClick={() => {
if (!sources.includes(source.key)) {
setSources([...sources, source.key]);
} else {
setSources(sources.filter((s) => s !== source.key));
}
}}
>
<div className="flex flex-row space-x-1.5 text-black/80 dark:text-white/80">
{source.icon}
<p className="text-xs">{source.name}</p>
</div>
<Switch
checked={sources.includes(source.key)}
className="group relative flex h-4 w-7 shrink-0 cursor-pointer rounded-full bg-light-200 dark:bg-white/10 p-0.5 duration-200 ease-in-out focus:outline-none transition-colors disabled:opacity-60 disabled:cursor-not-allowed data-[checked]:bg-sky-500 dark:data-[checked]:bg-sky-500"
>
<span
aria-hidden="true"
className="pointer-events-none inline-block size-3 translate-x-[1px] group-data-[checked]:translate-x-3 rounded-full bg-white shadow-lg ring-0 transition duration-200 ease-in-out"
/>
</Switch>
</div>
))}
</motion.div>
</PopoverPanel>
)}
</AnimatePresence>
</>
)}
</Popover>
);
};

export default Sources;
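The panel above enables or disables each source by adding or removing its key in the sources array rather than mutating it in place. A minimal sketch of that toggle pattern in isolation (the helper name is ours, not part of the codebase):

// Hypothetical helper capturing the immutable toggle used in Sources.tsx.
const toggleSource = (list: string[], key: string): string[] =>
  list.includes(key) ? list.filter((s) => s !== key) : [...list, key];

// toggleSource(['web'], 'academic')        -> ['web', 'academic']
// toggleSource(['web', 'academic'], 'web') -> ['academic']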
src/components/MessageRenderer/CodeBlock/CodeBlockDarkTheme.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import type { CSSProperties } from 'react';

const darkTheme = {
'hljs-comment': {
color: '#8b949e',
},
'hljs-quote': {
color: '#8b949e',
},
'hljs-variable': {
color: '#ff7b72',
},
'hljs-template-variable': {
color: '#ff7b72',
},
'hljs-tag': {
color: '#ff7b72',
},
'hljs-name': {
color: '#ff7b72',
},
'hljs-selector-id': {
color: '#ff7b72',
},
'hljs-selector-class': {
color: '#ff7b72',
},
'hljs-regexp': {
color: '#ff7b72',
},
'hljs-deletion': {
color: '#ff7b72',
},
'hljs-number': {
color: '#f2cc60',
},
'hljs-built_in': {
color: '#f2cc60',
},
'hljs-builtin-name': {
color: '#f2cc60',
},
'hljs-literal': {
color: '#f2cc60',
},
'hljs-type': {
color: '#f2cc60',
},
'hljs-params': {
color: '#f2cc60',
},
'hljs-meta': {
color: '#f2cc60',
},
'hljs-link': {
color: '#f2cc60',
},
'hljs-attribute': {
color: '#58a6ff',
},
'hljs-string': {
color: '#7ee787',
},
'hljs-symbol': {
color: '#7ee787',
},
'hljs-bullet': {
color: '#7ee787',
},
'hljs-addition': {
color: '#7ee787',
},
'hljs-title': {
color: '#79c0ff',
},
'hljs-section': {
color: '#79c0ff',
},
'hljs-keyword': {
color: '#c297ff',
},
'hljs-selector-tag': {
color: '#c297ff',
},
hljs: {
display: 'block',
overflowX: 'auto',
background: '#0d1117',
color: '#c9d1d9',
padding: '0.75em',
border: '1px solid #21262d',
borderRadius: '10px',
},
'hljs-emphasis': {
fontStyle: 'italic',
},
'hljs-strong': {
fontWeight: 'bold',
},
} satisfies Record<string, CSSProperties>;

export default darkTheme;
src/components/MessageRenderer/CodeBlock/CodeBlockLightTheme.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import type { CSSProperties } from 'react';

const lightTheme = {
'hljs-comment': {
color: '#6e7781',
},
'hljs-quote': {
color: '#6e7781',
},
'hljs-variable': {
color: '#d73a49',
},
'hljs-template-variable': {
color: '#d73a49',
},
'hljs-tag': {
color: '#d73a49',
},
'hljs-name': {
color: '#d73a49',
},
'hljs-selector-id': {
color: '#d73a49',
},
'hljs-selector-class': {
color: '#d73a49',
},
'hljs-regexp': {
color: '#d73a49',
},
'hljs-deletion': {
color: '#d73a49',
},
'hljs-number': {
color: '#b08800',
},
'hljs-built_in': {
color: '#b08800',
},
'hljs-builtin-name': {
color: '#b08800',
},
'hljs-literal': {
color: '#b08800',
},
'hljs-type': {
color: '#b08800',
},
'hljs-params': {
color: '#b08800',
},
'hljs-meta': {
color: '#b08800',
},
'hljs-link': {
color: '#b08800',
},
'hljs-attribute': {
color: '#0a64ae',
},
'hljs-string': {
color: '#22863a',
},
'hljs-symbol': {
color: '#22863a',
},
'hljs-bullet': {
color: '#22863a',
},
'hljs-addition': {
color: '#22863a',
},
'hljs-title': {
color: '#005cc5',
},
'hljs-section': {
color: '#005cc5',
},
'hljs-keyword': {
color: '#6f42c1',
},
'hljs-selector-tag': {
color: '#6f42c1',
},
hljs: {
display: 'block',
overflowX: 'auto',
background: '#ffffff',
color: '#24292f',
padding: '0.75em',
border: '1px solid #e8edf1',
borderRadius: '10px',
},
'hljs-emphasis': {
fontStyle: 'italic',
},
'hljs-strong': {
fontWeight: 'bold',
},
} satisfies Record<string, CSSProperties>;

export default lightTheme;
src/components/MessageRenderer/CodeBlock/index.tsx (new file, 64 lines)
@@ -0,0 +1,64 @@
'use client';

import { CheckIcon, CopyIcon } from '@phosphor-icons/react';
import React, { useEffect, useMemo, useState } from 'react';
import { useTheme } from 'next-themes';
import SyntaxHighlighter from 'react-syntax-highlighter';
import darkTheme from './CodeBlockDarkTheme';
import lightTheme from './CodeBlockLightTheme';

const CodeBlock = ({
language,
children,
}: {
language: string;
children: React.ReactNode;
}) => {
const { resolvedTheme } = useTheme();
const [mounted, setMounted] = useState(false);

const [copied, setCopied] = useState(false);

useEffect(() => {
setMounted(true);
}, []);

const syntaxTheme = useMemo(() => {
if (!mounted) return lightTheme;
return resolvedTheme === 'dark' ? darkTheme : lightTheme;
}, [mounted, resolvedTheme]);

return (
<div className="relative">
<button
className="absolute top-2 right-2 p-1"
onClick={() => {
navigator.clipboard.writeText(children as string);
setCopied(true);
setTimeout(() => setCopied(false), 2000);
}}
>
{copied ? (
<CheckIcon
size={16}
className="absolute top-2 right-2 text-black/70 dark:text-white/70"
/>
) : (
<CopyIcon
size={16}
className="absolute top-2 right-2 transition duration-200 text-black/70 dark:text-white/70 hover:text-gray-800/70 hover:dark:text-gray-300/70"
/>
)}
</button>
<SyntaxHighlighter
language={language}
style={syntaxTheme}
showInlineLineNumbers
>
{children as string}
</SyntaxHighlighter>
</div>
);
};

export default CodeBlock;
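A component like this is typically registered as the code renderer of whatever markdown pipeline displays assistant answers. A rough sketch of that wiring, assuming a react-markdown style components map (this mapping is our illustration, not necessarily how Perplexica registers it):

// Hypothetical wiring: CodeBlock as the `code` renderer of react-markdown.
import Markdown from 'react-markdown';
import CodeBlock from '@/components/MessageRenderer/CodeBlock';

const Answer = ({ text }: { text: string }) => (
  <Markdown
    components={{
      code: ({ className, children }) => {
        // className arrives as e.g. "language-ts" for fenced blocks.
        const language = /language-(\w+)/.exec(className ?? '')?.[1] ?? 'text';
        return <CodeBlock language={language}>{children}</CodeBlock>;
      },
    }}
  >
    {text}
  </Markdown>
);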
@@ -37,7 +37,7 @@ const MessageSources = ({ sources }: { sources: Chunk[] }) => {
</p>
<div className="flex flex-row items-center justify-between">
<div className="flex flex-row items-center space-x-1">
{source.metadata.url === 'File' ? (
{source.metadata.url.includes('file_id://') ? (
<div className="bg-dark-200 hover:bg-dark-100 transition duration-200 flex items-center justify-center w-6 h-6 rounded-full">
<File size={12} className="text-white/70" />
</div>
@@ -51,7 +51,9 @@ const MessageSources = ({ sources }: { sources: Chunk[] }) => {
/>
)}
<p className="text-xs text-black/50 dark:text-white/50 overflow-hidden whitespace-nowrap text-ellipsis">
{source.metadata.url.replace(/.+\/\/|www.|\..+/g, '')}
{source.metadata.url.includes('file_id://')
? 'Uploaded File'
: source.metadata.url.replace(/.+\/\/|www.|\..+/g, '')}
</p>
</div>
<div className="flex flex-row items-center space-x-1 text-black/50 dark:text-white/50 text-xs">
@@ -205,8 +205,9 @@ const Navbar = () => {
useEffect(() => {
if (sections.length > 0 && sections[0].message) {
const newTitle =
sections[0].message.query.substring(0, 30) + '...' ||
'New Conversation';
sections[0].message.query.length > 30
? `${sections[0].message.query.substring(0, 30).trim()}...`
: sections[0].message.query || 'New Conversation';

setTitle(newTitle);
const newTimeAgo = formatTimeDifference(
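The Navbar change above stops unconditionally appending '...' and only truncates titles longer than 30 characters. Captured as a standalone helper (a sketch; the helper name is ours):

// Hypothetical helper mirroring the corrected title logic above.
const toNavbarTitle = (query: string, max = 30): string =>
  query.length > max
    ? `${query.substring(0, max).trim()}...`
    : query || 'New Conversation';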
@@ -17,7 +17,7 @@ const SearchImages = ({
messageId,
}: {
query: string;
chatHistory: Message[];
chatHistory: [string, string][];
messageId: string;
}) => {
const [images, setImages] = useState<Image[] | null>(null);

@@ -30,7 +30,7 @@ const Searchvideos = ({
messageId,
}: {
query: string;
chatHistory: Message[];
chatHistory: [string, string][];
messageId: string;
}) => {
const [videos, setVideos] = useState<Video[] | null>(null);

@@ -310,7 +310,7 @@ const SettingsSwitch = ({
checked={isChecked}
onChange={handleSave}
disabled={loading}
className="group relative flex h-6 w-12 shrink-0 cursor-pointer rounded-full bg-white/10 p-1 duration-200 ease-in-out focus:outline-none transition-colors disabled:opacity-60 disabled:cursor-not-allowed data-[checked]:bg-sky-500"
className="group relative flex h-6 w-12 shrink-0 cursor-pointer rounded-full bg-light-200 dark:bg-white/10 p-1 duration-200 ease-in-out focus:outline-none transition-colors disabled:opacity-60 disabled:cursor-not-allowed data-[checked]:bg-sky-500 dark:data-[checked]:bg-sky-500"
>
<span
aria-hidden="true"

@@ -91,7 +91,7 @@ const WeatherWidget = () => {
setData({
temperature: data.temperature,
condition: data.condition,
location: 'Mars',
location: location.city,
humidity: data.humidity,
windSpeed: data.windSpeed,
icon: data.icon,

@@ -1,5 +1,3 @@
import { Message } from '@/components/ChatWindow';

export const getSuggestions = async (chatHistory: [string, string][]) => {
const chatTurns = chatHistory.map(([role, content]) => {
if (role === 'human') {

@@ -29,7 +29,7 @@ const searchImages = async (
query: z.string().describe('The image search query.'),
});

const res = await llm.generateObject<z.infer<typeof schema>>({
const res = await llm.generateObject<typeof schema>({
messages: [
{
role: 'system',

@@ -28,7 +28,7 @@ const searchVideos = async (
query: z.string().describe('The video search query.'),
});

const res = await llm.generateObject<z.infer<typeof schema>>({
const res = await llm.generateObject<typeof schema>({
messages: [
{
role: 'system',
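Both generateObject call sites above move from generateObject<z.infer<typeof schema>> to generateObject<typeof schema>: the method is now parameterized by the zod schema type itself and derives the output type internally. A minimal sketch of what such a signature could look like (our reading of the change; the real BaseLLM API may differ):

import z from 'zod';

// Hypothetical signature mirroring the change: callers pass the schema type,
// and the return type is derived with z.infer inside the method.
interface ObjectGenerator {
  generateObject<TSchema extends z.ZodObject<any>>(input: {
    schema: TSchema;
    messages: { role: 'system' | 'user'; content: string }[];
  }): Promise<z.infer<TSchema>>;
}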
src/lib/agents/search/api.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import { ResearcherOutput, SearchAgentInput } from './types';
import SessionManager from '@/lib/session';
import { classify } from './classifier';
import Researcher from './researcher';
import { getWriterPrompt } from '@/lib/prompts/search/writer';
import { WidgetExecutor } from './widgets';

class APISearchAgent {
async searchAsync(session: SessionManager, input: SearchAgentInput) {
const classification = await classify({
chatHistory: input.chatHistory,
enabledSources: input.config.sources,
query: input.followUp,
llm: input.config.llm,
});

const widgetPromise = WidgetExecutor.executeAll({
classification,
chatHistory: input.chatHistory,
followUp: input.followUp,
llm: input.config.llm,
});

let searchPromise: Promise<ResearcherOutput> | null = null;

if (!classification.classification.skipSearch) {
const researcher = new Researcher();
searchPromise = researcher.research(SessionManager.createSession(), {
chatHistory: input.chatHistory,
followUp: input.followUp,
classification: classification,
config: input.config,
});
}

const [widgetOutputs, searchResults] = await Promise.all([
widgetPromise,
searchPromise,
]);

if (searchResults) {
session.emit('data', {
type: 'searchResults',
data: searchResults.searchFindings,
});
}

session.emit('data', {
type: 'researchComplete',
});

const finalContext =
searchResults?.searchFindings
.map(
(f, index) =>
`<result index=${index + 1} title=${f.metadata.title}>${f.content}</result>`,
)
.join('\n') || '';

const widgetContext = widgetOutputs
.map((o) => {
return `<result>${o.llmContext}</result>`;
})
.join('\n-------------\n');

const finalContextWithWidgets = `<search_results note="These are the search results and assistant can cite these">\n${finalContext}\n</search_results>\n<widgets_result noteForAssistant="Its output is already shown to the user, assistant can use this information to answer the query but do not CITE this as a source">\n${widgetContext}\n</widgets_result>`;

const writerPrompt = getWriterPrompt(
finalContextWithWidgets,
input.config.systemInstructions,
input.config.mode,
);

const answerStream = input.config.llm.streamText({
messages: [
{
role: 'system',
content: writerPrompt,
},
...input.chatHistory,
{
role: 'user',
content: input.followUp,
},
],
});

for await (const chunk of answerStream) {
session.emit('data', {
type: 'response',
data: chunk.contentChunk,
});
}

session.emit('end', {});
}
}

export default APISearchAgent;
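APISearchAgent communicates exclusively through SessionManager events: 'data' payloads discriminated by a type field, then a final 'end'. A rough consumer sketch, assuming an EventEmitter-style listener API on SessionManager (the .on() shape below is our assumption, not confirmed by the diff):

// Hypothetical consumer: stream agent events out as they arrive.
const session = SessionManager.createSession();

session.on('data', (event: { type: string; data?: unknown }) => {
  if (event.type === 'response') process.stdout.write(String(event.data));
  if (event.type === 'searchResults') console.error('sources ready');
});
session.on('end', () => console.error('done'));

await new APISearchAgent().searchAsync(session, input);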
@@ -4,9 +4,53 @@ import { classify } from './classifier';
import Researcher from './researcher';
import { getWriterPrompt } from '@/lib/prompts/search/writer';
import { WidgetExecutor } from './widgets';
import db from '@/lib/db';
import { chats, messages } from '@/lib/db/schema';
import { and, eq, gt } from 'drizzle-orm';
import { TextBlock } from '@/lib/types';

class SearchAgent {
async searchAsync(session: SessionManager, input: SearchAgentInput) {
const exists = await db.query.messages.findFirst({
where: and(
eq(messages.chatId, input.chatId),
eq(messages.messageId, input.messageId),
),
});

if (!exists) {
await db.insert(messages).values({
chatId: input.chatId,
messageId: input.messageId,
backendId: session.id,
query: input.followUp,
createdAt: new Date().toISOString(),
status: 'answering',
responseBlocks: [],
});
} else {
await db
.delete(messages)
.where(
and(eq(messages.chatId, input.chatId), gt(messages.id, exists.id)),
)
.execute();
await db
.update(messages)
.set({
status: 'answering',
backendId: session.id,
responseBlocks: [],
})
.where(
and(
eq(messages.chatId, input.chatId),
eq(messages.messageId, input.messageId),
),
)
.execute();
}

const classification = await classify({
chatHistory: input.chatHistory,
enabledSources: input.config.sources,
@@ -58,7 +102,7 @@ class SearchAgent {
searchResults?.searchFindings
.map(
(f, index) =>
`<result index=${index} title=${f.metadata.title}>${f.content}</result>`,
`<result index=${index + 1} title=${f.metadata.title}>${f.content}</result>`,
)
.join('\n') || '';

@@ -70,7 +114,11 @@ class SearchAgent {

const finalContextWithWidgets = `<search_results note="These are the search results and assistant can cite these">\n${finalContext}\n</search_results>\n<widgets_result noteForAssistant="Its output is already shown to the user, assistant can use this information to answer the query but do not CITE this as a source">\n${widgetContext}\n</widgets_result>`;

const writerPrompt = getWriterPrompt(finalContextWithWidgets);
const writerPrompt = getWriterPrompt(
finalContextWithWidgets,
input.config.systemInstructions,
input.config.mode,
);
const answerStream = input.config.llm.streamText({
messages: [
{
@@ -85,18 +133,53 @@ class SearchAgent {
],
});

let accumulatedText = '';
let responseBlockId = '';

for await (const chunk of answerStream) {
accumulatedText += chunk.contentChunk;
if (!responseBlockId) {
const block: TextBlock = {
id: crypto.randomUUID(),
type: 'text',
data: chunk.contentChunk,
};

session.emit('data', {
type: 'response',
data: chunk.contentChunk,
});
session.emitBlock(block);

responseBlockId = block.id;
} else {
const block = session.getBlock(responseBlockId) as TextBlock | null;

if (!block) {
continue;
}

block.data += chunk.contentChunk;

session.updateBlock(block.id, [
{
op: 'replace',
path: '/data',
value: block.data,
},
]);
}
}

session.emit('end', {});

await db
.update(messages)
.set({
status: 'completed',
responseBlocks: session.getAllBlocks(),
})
.where(
and(
eq(messages.chatId, input.chatId),
eq(messages.messageId, input.messageId),
),
)
.execute();
}
}
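The streaming loop above creates one TextBlock on the first chunk, then grows it via JSON-Patch-style replace operations rather than re-emitting whole blocks. In isolation the update shape looks like this (a sketch; applyPatch is a stand-in for whatever the session does internally):

// Hypothetical illustration of the block-update protocol used above.
type PatchOp = { op: 'replace'; path: string; value: unknown };

const block = { id: 'b1', type: 'text', data: 'Hel' };
const patch: PatchOp[] = [{ op: 'replace', path: '/data', value: 'Hello' }];

// A SessionManager could apply this with any RFC 6902 implementation,
// e.g. fast-json-patch's applyPatch(block, patch); that library choice is
// our assumption, not something the diff confirms.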
src/lib/agents/search/researcher/actions/academicSearch.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
import z from 'zod';
import { ResearchAction } from '../../types';
import { Chunk, SearchResultsResearchBlock } from '@/lib/types';
import { searchSearxng } from '@/lib/searxng';

const schema = z.object({
queries: z.array(z.string()).describe('List of academic search queries'),
});

const academicSearchDescription = `
Use this tool to perform academic searches for scholarly articles, papers, and research studies relevant to the user's query. Provide a list of concise search queries that will help gather comprehensive academic information on the topic at hand.
You can provide up to 3 queries at a time. Make sure the queries are specific and relevant to the user's needs.

For example, if the user is interested in recent advancements in renewable energy, your queries could be:
1. "Recent advancements in renewable energy 2024"
2. "Cutting-edge research on solar power technologies"
3. "Innovations in wind energy systems"

If this tool is present and no other tools are more relevant, you MUST use this tool to get the needed academic information.
`;

const academicSearchAction: ResearchAction<typeof schema> = {
name: 'academic_search',
schema: schema,
getDescription: () => academicSearchDescription,
getToolDescription: () =>
"Use this tool to perform academic searches for scholarly articles, papers, and research studies relevant to the user's query. Provide a list of concise search queries that will help gather comprehensive academic information on the topic at hand.",
enabled: (config) =>
config.sources.includes('academic') &&
config.classification.classification.skipSearch === false &&
config.classification.classification.academicSearch === true,
execute: async (input, additionalConfig) => {
input.queries = input.queries.slice(0, 3);

const researchBlock = additionalConfig.session.getBlock(
additionalConfig.researchBlockId,
);

if (researchBlock && researchBlock.type === 'research') {
researchBlock.data.subSteps.push({
type: 'searching',
id: crypto.randomUUID(),
searching: input.queries,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}

const searchResultsBlockId = crypto.randomUUID();
let searchResultsEmitted = false;

let results: Chunk[] = [];

const search = async (q: string) => {
const res = await searchSearxng(q, {
engines: ['arxiv', 'google scholar', 'pubmed'],
});

const resultChunks: Chunk[] = res.results.map((r) => ({
content: r.content || r.title,
metadata: {
title: r.title,
url: r.url,
},
}));

results.push(...resultChunks);

if (
!searchResultsEmitted &&
researchBlock &&
researchBlock.type === 'research'
) {
searchResultsEmitted = true;

researchBlock.data.subSteps.push({
id: searchResultsBlockId,
type: 'search_results',
reading: resultChunks,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
} else if (
searchResultsEmitted &&
researchBlock &&
researchBlock.type === 'research'
) {
const subStepIndex = researchBlock.data.subSteps.findIndex(
(step) => step.id === searchResultsBlockId,
);

const subStep = researchBlock.data.subSteps[
subStepIndex
] as SearchResultsResearchBlock;

subStep.reading.push(...resultChunks);

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}
};

await Promise.all(input.queries.map(search));

return {
type: 'search_results',
results,
};
},
};

export default academicSearchAction;
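The action fans out up to three queries in parallel, each scoped to scholarly SearXNG engines. The fan-out, condensed to its core (a sketch reusing searchSearxng's signature as it appears above):

// Minimal sketch of the parallel, engine-scoped query fan-out.
const queries = ['solar cell efficiency 2024', 'perovskite stability review'];

const chunks = (
  await Promise.all(
    queries.map((q) =>
      searchSearxng(q, { engines: ['arxiv', 'google scholar', 'pubmed'] }),
    ),
  )
).flatMap((res) =>
  res.results.map((r) => ({
    content: r.content || r.title,
    metadata: { title: r.title, url: r.url },
  })),
);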
@@ -1,12 +1,19 @@
import z from 'zod';
import { ResearchAction } from '../../types';

const actionDescription = `
Use this action ONLY when you have completed all necessary research and are ready to provide a final answer to the user. This indicates that you have gathered sufficient information from previous steps and are concluding the research process.
YOU MUST CALL THIS ACTION TO SIGNAL COMPLETION; DO NOT OUTPUT FINAL ANSWERS DIRECTLY TO THE USER.
IT WILL BE AUTOMATICALLY TRIGGERED IF MAXIMUM ITERATIONS ARE REACHED SO IF YOU'RE LOW ON ITERATIONS, DON'T CALL IT AND INSTEAD FOCUS ON GATHERING ESSENTIAL INFO FIRST.
`;

const doneAction: ResearchAction<any> = {
name: 'done',
description:
'Only call this after ___plan AND after any other needed tool calls when you truly have enough to answer. Do not call if information is still missing.',
enabled: (_) => true,
schema: z.object({}),
getToolDescription: () =>
'Only call this after __reasoning_preamble AND after any other needed tool calls when you truly have enough to answer. Do not call if information is still missing.',
getDescription: () => actionDescription,
enabled: (_) => true,
execute: async (params, additionalConfig) => {
return {
type: 'done',
@@ -1,12 +1,18 @@
import academicSearchAction from './academicSearch';
import doneAction from './done';
import planAction from './plan';
import ActionRegistry from './registry';
import scrapeURLAction from './scrapeURL';
import socialSearchAction from './socialSearch';
import uploadsSearchAction from './uploadsSearch';
import webSearchAction from './webSearch';

ActionRegistry.register(webSearchAction);
ActionRegistry.register(doneAction);
ActionRegistry.register(planAction);
ActionRegistry.register(scrapeURLAction);
ActionRegistry.register(uploadsSearchAction);
ActionRegistry.register(academicSearchAction);
ActionRegistry.register(socialSearchAction);

export { ActionRegistry };
@@ -9,12 +9,26 @@ const schema = z.object({
),
});

const actionDescription = `
Use this tool FIRST on every turn to state your plan in natural language before any other action. Keep it short, action-focused, and tailored to the current query.
Make sure to not include reference to any tools or actions you might take, just the plan itself. The user isn't aware about tools, but they love to see your thought process.

Here are some examples of good plans:
<examples>
- "Okay, the user wants to know the latest advancements in renewable energy. I will start by looking for recent articles and studies on this topic, then summarize the key points." -> "I have gathered enough information to provide a comprehensive answer."
- "The user is asking about the health benefits of a Mediterranean diet. I will search for scientific studies and expert opinions on this diet, then compile the findings into a clear summary." -> "I have gathered information about the Mediterranean diet and its health benefits, I will now look up for any recent studies to ensure the information is current."
</examples>

YOU CAN NEVER CALL ANY OTHER TOOL BEFORE CALLING THIS ONE FIRST, IF YOU DO, THAT CALL WOULD BE IGNORED.
`;

const planAction: ResearchAction<typeof schema> = {
name: '___plan',
description:
'Use this FIRST on every turn to state your plan in natural language before any other action. Keep it short, action-focused, and tailored to the current query.',
name: '__reasoning_preamble',
schema: schema,
enabled: (_) => true,
getToolDescription: () =>
'Use this FIRST on every turn to state your plan in natural language before any other action. Keep it short, action-focused, and tailored to the current query.',
getDescription: () => actionDescription,
enabled: (config) => config.mode !== 'speed',
execute: async (input, _) => {
return {
type: 'reasoning',
@@ -4,6 +4,8 @@ import {
AdditionalConfig,
ClassifierOutput,
ResearchAction,
SearchAgentConfig,
SearchSources,
} from '../../types';

class ActionRegistry {
@@ -19,6 +21,9 @@ class ActionRegistry {

static getAvailableActions(config: {
classification: ClassifierOutput;
fileIds: string[];
mode: SearchAgentConfig['mode'];
sources: SearchSources[];
}): ResearchAction[] {
return Array.from(
this.actions.values().filter((action) => action.enabled(config)),
@@ -27,30 +32,42 @@ class ActionRegistry {

static getAvailableActionTools(config: {
classification: ClassifierOutput;
fileIds: string[];
mode: SearchAgentConfig['mode'];
sources: SearchSources[];
}): Tool[] {
const availableActions = this.getAvailableActions(config);

return availableActions.map((action) => ({
name: action.name,
description: action.description,
description: action.getToolDescription({ mode: config.mode }),
schema: action.schema,
}));
}

static getAvailableActionsDescriptions(config: {
classification: ClassifierOutput;
fileIds: string[];
mode: SearchAgentConfig['mode'];
sources: SearchSources[];
}): string {
const availableActions = this.getAvailableActions(config);

return availableActions
.map((action) => `------------\n##${action.name}\n${action.description}`)
.map(
(action) =>
`<tool name="${action.name}">\n${action.getDescription({ mode: config.mode })}\n</tool>`,
)
.join('\n\n');
}

static async execute(
name: string,
params: any,
additionalConfig: AdditionalConfig & { researchBlockId: string },
additionalConfig: AdditionalConfig & {
researchBlockId: string;
fileIds: string[];
},
) {
const action = this.actions.get(name);

@@ -63,7 +80,10 @@ class ActionRegistry {

static async executeAll(
actions: ToolCall[],
additionalConfig: AdditionalConfig & { researchBlockId: string },
additionalConfig: AdditionalConfig & {
researchBlockId: string;
fileIds: string[];
},
): Promise<ActionOutput[]> {
const results: ActionOutput[] = [];
@@ -10,11 +10,19 @@ const schema = z.object({
urls: z.array(z.string()).describe('A list of URLs to scrape content from.'),
});

const actionDescription = `
Use this tool to scrape and extract content from the provided URLs. This is useful when the user has asked you to extract or summarize information from specific web pages. You can provide up to 3 URLs at a time. NEVER CALL THIS TOOL EXPLICITLY YOURSELF UNLESS INSTRUCTED TO DO SO BY THE USER.
You should only call this tool when the user has specifically requested information from certain web pages; never call this yourself to get extra information without user instruction.

For example, if the user says "Please summarize the content of https://example.com/article", you can call this tool with that URL to get the content and then provide the summary, or "What does X mean according to https://example.com/page", you can call this tool with that URL to get the content and provide the explanation.
`;

const scrapeURLAction: ResearchAction<typeof schema> = {
name: 'scrape_url',
description:
'Use this tool to scrape and extract content from the provided URLs. This is useful when the user has asked you to extract or summarize information from specific web pages. You can provide up to 3 URLs at a time. NEVER CALL THIS TOOL EXPLICITLY YOURSELF UNLESS INSTRUCTED TO DO SO BY THE USER.',
schema: schema,
getToolDescription: () =>
'Use this tool to scrape and extract content from the provided URLs. This is useful when the user has asked you to extract or summarize information from specific web pages. You can provide up to 3 URLs at a time. NEVER CALL THIS TOOL EXPLICITLY YOURSELF UNLESS INSTRUCTED TO DO SO BY THE USER.',
getDescription: () => actionDescription,
enabled: (_) => true,
execute: async (params, additionalConfig) => {
params.urls = params.urls.slice(0, 3);
src/lib/agents/search/researcher/actions/socialSearch.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
import z from 'zod';
import { ResearchAction } from '../../types';
import { Chunk, SearchResultsResearchBlock } from '@/lib/types';
import { searchSearxng } from '@/lib/searxng';

const schema = z.object({
queries: z.array(z.string()).describe('List of social search queries'),
});

const socialSearchDescription = `
Use this tool to perform social media searches for relevant posts, discussions, and trends related to the user's query. Provide a list of concise search queries that will help gather comprehensive social media information on the topic at hand.
You can provide up to 3 queries at a time. Make sure the queries are specific and relevant to the user's needs.

For example, if the user is interested in public opinion on electric vehicles, your queries could be:
1. "Electric vehicles public opinion 2024"
2. "Social media discussions on EV adoption"
3. "Trends in electric vehicle usage"

If this tool is present and no other tools are more relevant, you MUST use this tool to get the needed social media information.
`;

const socialSearchAction: ResearchAction<typeof schema> = {
name: 'social_search',
schema: schema,
getDescription: () => socialSearchDescription,
getToolDescription: () =>
"Use this tool to perform social media searches for relevant posts, discussions, and trends related to the user's query. Provide a list of concise search queries that will help gather comprehensive social media information on the topic at hand.",
enabled: (config) =>
config.sources.includes('discussions') &&
config.classification.classification.skipSearch === false &&
config.classification.classification.discussionSearch === true,
execute: async (input, additionalConfig) => {
input.queries = input.queries.slice(0, 3);

const researchBlock = additionalConfig.session.getBlock(
additionalConfig.researchBlockId,
);

if (researchBlock && researchBlock.type === 'research') {
researchBlock.data.subSteps.push({
type: 'searching',
id: crypto.randomUUID(),
searching: input.queries,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}

const searchResultsBlockId = crypto.randomUUID();
let searchResultsEmitted = false;

let results: Chunk[] = [];

const search = async (q: string) => {
const res = await searchSearxng(q, {
engines: ['reddit'],
});

const resultChunks: Chunk[] = res.results.map((r) => ({
content: r.content || r.title,
metadata: {
title: r.title,
url: r.url,
},
}));

results.push(...resultChunks);

if (
!searchResultsEmitted &&
researchBlock &&
researchBlock.type === 'research'
) {
searchResultsEmitted = true;

researchBlock.data.subSteps.push({
id: searchResultsBlockId,
type: 'search_results',
reading: resultChunks,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
} else if (
searchResultsEmitted &&
researchBlock &&
researchBlock.type === 'research'
) {
const subStepIndex = researchBlock.data.subSteps.findIndex(
(step) => step.id === searchResultsBlockId,
);

const subStep = researchBlock.data.subSteps[
subStepIndex
] as SearchResultsResearchBlock;

subStep.reading.push(...resultChunks);

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}
};

await Promise.all(input.queries.map(search));

return {
type: 'search_results',
results,
};
},
};

export default socialSearchAction;
src/lib/agents/search/researcher/actions/uploadsSearch.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import z from 'zod';
import { ResearchAction } from '../../types';
import UploadStore from '@/lib/uploads/store';

const schema = z.object({
queries: z
.array(z.string())
.describe(
'A list of queries to search in user uploaded files. Can be a maximum of 3 queries.',
),
});

const uploadsSearchAction: ResearchAction<typeof schema> = {
name: 'uploads_search',
enabled: (config) =>
(config.classification.classification.personalSearch &&
config.fileIds.length > 0) ||
config.fileIds.length > 0,
schema,
getToolDescription: () =>
`Use this tool to perform searches over the user's uploaded files. This is useful when you need to gather information from the user's documents to answer their questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.`,
getDescription: () => `
Use this tool to perform searches over the user's uploaded files. This is useful when you need to gather information from the user's documents to answer their questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.
Always ensure that the queries you use are directly relevant to the user's request and pertain to the content of their uploaded files.

For example, if the user says "Please find information about X in my uploaded documents", you can call this tool with a query related to X to retrieve the relevant information from their files.
Never use this tool to search the web or for information that is not contained within the user's uploaded files.
`,
execute: async (input, additionalConfig) => {
input.queries = input.queries.slice(0, 3);

const researchBlock = additionalConfig.session.getBlock(
additionalConfig.researchBlockId,
);

if (researchBlock && researchBlock.type === 'research') {
researchBlock.data.subSteps.push({
id: crypto.randomUUID(),
type: 'upload_searching',
queries: input.queries,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}

const uploadStore = new UploadStore({
embeddingModel: additionalConfig.embedding,
fileIds: additionalConfig.fileIds,
});

const results = await uploadStore.query(input.queries, 10);

const seenIds = new Map<string, number>();

const filteredSearchResults = results
.map((result, index) => {
if (result.metadata.url && !seenIds.has(result.metadata.url)) {
seenIds.set(result.metadata.url, index);
return result;
} else if (result.metadata.url && seenIds.has(result.metadata.url)) {
const existingIndex = seenIds.get(result.metadata.url)!;
const existingResult = results[existingIndex];

existingResult.content += `\n\n${result.content}`;

return undefined;
}

return result;
})
.filter((r) => r !== undefined);

if (researchBlock && researchBlock.type === 'research') {
researchBlock.data.subSteps.push({
id: crypto.randomUUID(),
type: 'upload_search_results',
results: filteredSearchResults,
});

additionalConfig.session.updateBlock(additionalConfig.researchBlockId, [
{
op: 'replace',
path: '/data/subSteps',
value: researchBlock.data.subSteps,
},
]);
}

return {
type: 'search_results',
results: filteredSearchResults,
};
},
};

export default uploadsSearchAction;
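The dedup pass above merges chunks that share a URL by concatenating their content onto the first occurrence and dropping the duplicates. The same idea as a standalone sketch (simplified types, not the repo's Chunk):

// Hypothetical standalone version of the URL-keyed merge used above.
type Doc = { content: string; metadata: { url?: string } };

const mergeByUrl = (docs: Doc[]): Doc[] => {
  const firstIndexByUrl = new Map<string, number>();
  return docs
    .map((doc, i) => {
      const url = doc.metadata.url;
      if (!url) return doc;
      const seen = firstIndexByUrl.get(url);
      if (seen === undefined) {
        firstIndexByUrl.set(url, i);
        return doc;
      }
      docs[seen].content += `\n\n${doc.content}`; // fold into first occurrence
      return undefined;
    })
    .filter((d): d is Doc => d !== undefined);
};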
@@ -10,23 +10,79 @@ const actionSchema = z.object({
.describe('An array of search queries to perform web searches for.'),
});

const actionDescription = `
Use immediately after the ___plan call when you need information. Default to using this unless you already have everything needed to finish. Provide 1-3 short, SEO-friendly queries (keywords, not sentences) that cover the user ask. Always prefer current/contextual queries (e.g., include year for news).
const speedModePrompt = `
Use this tool to perform web searches based on the provided queries. This is useful when you need to gather information from the web to answer the user's questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.
You are currently on speed mode, meaning you would only get to call this tool once. Make sure to prioritize the most important queries that are likely to get you the needed information in one go.

You can search maximum of 3 queries at a time.
Your queries should be very targeted and specific to the information you need, avoid broad or generic queries.
Your queries shouldn't be sentences but rather keywords that are SEO friendly and can be used to search the web for information.

For fast mode, you can only use this tool once so make sure to get all needed information in one go.
For example, if the user is asking about the features of a new technology, you might use queries like "GPT-5.1 features", "GPT-5.1 release date", "GPT-5.1 improvements" rather than a broad query like "Tell me about GPT-5.1".

For balanced and quality modes, you can use this tool multiple times as needed.
You can search for 3 queries in one go, make sure to utilize all 3 queries to maximize the information you can gather. If a question is simple, then split your queries to cover different aspects or related topics to get a comprehensive understanding.
If this tool is present and no other tools are more relevant, you MUST use this tool to get the needed information.
`;

In quality and balanced mode, first try to gather upper level information with broad queries, then use more specific queries based on what you find to find all information needed.
const balancedModePrompt = `
Use this tool to perform web searches based on the provided queries. This is useful when you need to gather information from the web to answer the user's questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.

You can call this tool several times if needed to gather enough information.
Start initially with broader queries to get an overview, then narrow down with more specific queries based on the results you receive.

Your queries shouldn't be sentences but rather keywords that are SEO friendly and can be used to search the web for information.

For example, if the user is asking about Tesla, your actions should be like:
1. __reasoning_preamble "The user is asking about Tesla. I will start with broader queries to get an overview of Tesla, then narrow down with more specific queries based on the results I receive." then
2. web_search ["Tesla", "Tesla latest news", "Tesla stock price"] then
3. __reasoning_preamble "Based on the previous search results, I will now narrow down my queries to focus on Tesla's recent developments and stock performance." then
4. web_search ["Tesla Q2 2025 earnings", "Tesla new model 2025", "Tesla stock analysis"] then done.
5. __reasoning_preamble "I have gathered enough information to provide a comprehensive answer."
6. done.

You can search for 3 queries in one go, make sure to utilize all 3 queries to maximize the information you can gather. If a question is simple, then split your queries to cover different aspects or related topics to get a comprehensive understanding.
If this tool is present and no other tools are more relevant, you MUST use this tool to get the needed information. You can call this tool multiple times as needed.
`;

const qualityModePrompt = `
Use this tool to perform web searches based on the provided queries. This is useful when you need to gather information from the web to answer the user's questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.

You have to call this tool several times to gather enough information unless the question is very simple (like greeting questions or basic facts).
Start initially with broader queries to get an overview, then narrow down with more specific queries based on the results you receive.
Never stop before at least 5-6 iterations of searches unless the user question is very simple.

Your queries shouldn't be sentences but rather keywords that are SEO friendly and can be used to search the web for information.

You can search for 3 queries in one go, make sure to utilize all 3 queries to maximize the information you can gather. If a question is simple, then split your queries to cover different aspects or related topics to get a comprehensive understanding.
If this tool is present and no other tools are more relevant, you MUST use this tool to get the needed information. You can call this tool multiple times as needed.
`;

const webSearchAction: ResearchAction<typeof actionSchema> = {
name: 'web_search',
description: actionDescription,
schema: actionSchema,
getToolDescription: () =>
"Use this tool to perform web searches based on the provided queries. This is useful when you need to gather information from the web to answer the user's questions. You can provide up to 3 queries at a time. You will have to use this every single time if this is present and relevant.",
getDescription: (config) => {
let prompt = '';

switch (config.mode) {
case 'speed':
prompt = speedModePrompt;
break;
case 'balanced':
prompt = balancedModePrompt;
break;
case 'quality':
prompt = qualityModePrompt;
break;
default:
prompt = speedModePrompt;
break;
}

return prompt;
},
enabled: (config) =>
config.sources.includes('web') &&
config.classification.classification.skipSearch === false,
execute: async (input, additionalConfig) => {
input.queries = input.queries.slice(0, 3);
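getDescription now varies by optimization mode while getToolDescription stays mode-independent. An equivalent, table-driven way to express the mode dispatch (a style alternative of ours, not the repo's code):

// Sketch: record lookup instead of the switch above.
const promptsByMode = {
  speed: speedModePrompt,
  balanced: balancedModePrompt,
  quality: qualityModePrompt,
} as const;

const getDescription = (config: { mode: 'speed' | 'balanced' | 'quality' }) =>
  promptsByMode[config.mode] ?? speedModePrompt;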
@@ -21,11 +21,17 @@ class Researcher {

const availableTools = ActionRegistry.getAvailableActionTools({
classification: input.classification,
fileIds: input.config.fileIds,
mode: input.config.mode,
sources: input.config.sources,
});

const availableActionsDescription =
ActionRegistry.getAvailableActionsDescriptions({
classification: input.classification,
fileIds: input.config.fileIds,
mode: input.config.mode,
sources: input.config.sources,
});

const researchBlockId = crypto.randomUUID();
@@ -56,6 +62,7 @@ class Researcher {
input.config.mode,
i,
maxIteration,
input.config.fileIds,
);

const actionStream = input.config.llm.streamText({
@@ -80,7 +87,7 @@ class Researcher {
if (partialRes.toolCallChunk.length > 0) {
partialRes.toolCallChunk.forEach((tc) => {
if (
tc.name === '___plan' &&
tc.name === '__reasoning_preamble' &&
tc.arguments['plan'] &&
!reasoningEmitted &&
block &&
@@ -102,7 +109,7 @@ class Researcher {
},
]);
} else if (
tc.name === '___plan' &&
tc.name === '__reasoning_preamble' &&
tc.arguments['plan'] &&
reasoningEmitted &&
block &&
@@ -159,6 +166,7 @@ class Researcher {
embedding: input.config.embedding,
session: session,
researchBlockId: researchBlockId,
fileIds: input.config.fileIds,
});

actionOutput.push(...actionResults);
@@ -198,8 +206,9 @@ class Researcher {
})
.filter((r) => r !== undefined);

session.emit('data', {
type: 'sources',
session.emitBlock({
id: crypto.randomUUID(),
type: 'source',
data: filteredSearchResults,
});
@@ -8,15 +8,19 @@ export type SearchSources = 'web' | 'discussions' | 'academic';

export type SearchAgentConfig = {
sources: SearchSources[];
fileIds: string[];
llm: BaseLLM<any>;
embedding: BaseEmbedding<any>;
mode: 'speed' | 'balanced' | 'quality';
systemInstructions: string;
};

export type SearchAgentInput = {
chatHistory: ChatTurnMessage[];
followUp: string;
config: SearchAgentConfig;
chatId: string;
messageId: string;
};

export type WidgetInput = {
@@ -99,13 +103,20 @@ export interface ResearchAction<
TSchema extends z.ZodObject<any> = z.ZodObject<any>,
> {
name: string;
description: string;
schema: z.ZodObject<any>;
enabled: (config: { classification: ClassifierOutput }) => boolean;
getToolDescription: (config: { mode: SearchAgentConfig['mode'] }) => string;
getDescription: (config: { mode: SearchAgentConfig['mode'] }) => string;
enabled: (config: {
classification: ClassifierOutput;
fileIds: string[];
mode: SearchAgentConfig['mode'];
sources: SearchSources[];
}) => boolean;
execute: (
params: z.infer<TSchema>,
additionalConfig: AdditionalConfig & {
researchBlockId: string;
fileIds: string[];
},
) => Promise<ActionOutput>;
}
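The widened ResearchAction interface is easiest to read through a minimal conforming action (a toy example of ours, not a real action in the repo; the ActionOutput shape is assumed from the 'done' action above):

// Hypothetical no-op action showing the full updated contract.
import z from 'zod';

const echoSchema = z.object({ text: z.string() });

const echoAction: ResearchAction<typeof echoSchema> = {
  name: 'echo',
  schema: echoSchema,
  getToolDescription: ({ mode }) => `Echoes text back (mode: ${mode}).`,
  getDescription: ({ mode }) => `Echoes text back (mode: ${mode}).`,
  enabled: ({ sources, fileIds }) =>
    sources.includes('web') || fileIds.length > 0,
  // Returns a 'done'-style output; params.text would be used in a real action.
  execute: async (params) => ({ type: 'done' }),
};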
@@ -51,6 +51,10 @@ const calculationWidget: Widget = {
schema,
});

if (output.notPresent) {
return;
}

const result = mathEval(output.expression);

return {
@@ -19,7 +19,7 @@ const generateSuggestions = async (
input: SuggestionGeneratorInput,
llm: BaseLLM<any>,
) => {
const res = await llm.generateObject<z.infer<typeof schema>>({
const res = await llm.generateObject<typeof schema>({
messages: [
{
role: 'system',
@@ -45,6 +45,7 @@ fs.readdirSync(migrationsFolder)
|
||||
const already = db
|
||||
.prepare('SELECT 1 FROM ran_migrations WHERE name = ?')
|
||||
.get(migrationName);
|
||||
|
||||
if (already) {
|
||||
console.log(`Skipping already-applied migration: ${file}`);
|
||||
return;
|
||||
@@ -113,6 +114,160 @@ fs.readdirSync(migrationsFolder)
|
||||
|
||||
db.exec('DROP TABLE messages;');
db.exec('ALTER TABLE messages_with_sources RENAME TO messages;');
} else if (migrationName === '0002') {
  /* Migrate chat */
  db.exec(`
    CREATE TABLE IF NOT EXISTS chats_new (
      id TEXT PRIMARY KEY,
      title TEXT NOT NULL,
      createdAt TEXT NOT NULL,
      sources TEXT DEFAULT '[]',
      files TEXT DEFAULT '[]'
    );
  `);

  const chats = db
    .prepare('SELECT id, title, createdAt, files FROM chats')
    .all();

  const insertChat = db.prepare(`
    INSERT INTO chats_new (id, title, createdAt, sources, files)
    VALUES (?, ?, ?, ?, ?)
  `);

  chats.forEach((chat: any) => {
    let files = chat.files;
    while (typeof files === 'string') {
      files = JSON.parse(files || '[]');
    }

    insertChat.run(
      chat.id,
      chat.title,
      chat.createdAt,
      '["web"]',
      JSON.stringify(files),
    );
  });

  db.exec('DROP TABLE chats;');
  db.exec('ALTER TABLE chats_new RENAME TO chats;');

  /* Migrate messages */

  db.exec(`
    CREATE TABLE IF NOT EXISTS messages_new (
      id INTEGER PRIMARY KEY,
      messageId TEXT NOT NULL,
      chatId TEXT NOT NULL,
      backendId TEXT NOT NULL,
      query TEXT NOT NULL,
      createdAt TEXT NOT NULL,
      responseBlocks TEXT DEFAULT '[]',
      status TEXT DEFAULT 'answering'
    );
  `);

  const messages = db
    .prepare(
      'SELECT id, messageId, chatId, type, content, createdAt, sources FROM messages ORDER BY id ASC',
    )
    .all();

  const insertMessage = db.prepare(`
    INSERT INTO messages_new (messageId, chatId, backendId, query, createdAt, responseBlocks, status)
    VALUES (?, ?, ?, ?, ?, ?, ?)
  `);

  let currentMessageData: {
    sources?: any[];
    response?: string;
    query?: string;
    messageId?: string;
    chatId?: string;
    createdAt?: string;
  } = {};
  let lastCompleted = true;

  messages.forEach((msg: any) => {
    if (msg.type === 'user' && lastCompleted) {
      currentMessageData = {};
      currentMessageData.messageId = msg.messageId;
      currentMessageData.chatId = msg.chatId;
      currentMessageData.query = msg.content;
      currentMessageData.createdAt = msg.createdAt;
      lastCompleted = false;
    } else if (msg.type === 'source' && !lastCompleted) {
      let sources = msg.sources;

      while (typeof sources === 'string') {
        sources = JSON.parse(sources || '[]');
      }

      currentMessageData.sources = sources;
    } else if (msg.type === 'assistant' && !lastCompleted) {
      currentMessageData.response = msg.content;
      insertMessage.run(
        currentMessageData.messageId,
        currentMessageData.chatId,
        `${currentMessageData.messageId}-backend`,
        currentMessageData.query,
        currentMessageData.createdAt,
        JSON.stringify([
          {
            id: crypto.randomUUID(),
            type: 'text',
            data: currentMessageData.response || '',
          },
          ...(currentMessageData.sources &&
          currentMessageData.sources.length > 0
            ? [
                {
                  id: crypto.randomUUID(),
                  type: 'source',
                  data: currentMessageData.sources,
                },
              ]
            : []),
        ]),
        'completed',
      );

      lastCompleted = true;
    } else if (msg.type === 'user' && !lastCompleted) {
      /* Message wasn't completed so we'll just create the record with an empty response */
      insertMessage.run(
        currentMessageData.messageId,
        currentMessageData.chatId,
        `${currentMessageData.messageId}-backend`,
        currentMessageData.query,
        currentMessageData.createdAt,
        JSON.stringify([
          {
            id: crypto.randomUUID(),
            type: 'text',
            data: '',
          },
          ...(currentMessageData.sources &&
          currentMessageData.sources.length > 0
            ? [
                {
                  id: crypto.randomUUID(),
                  type: 'source',
                  data: currentMessageData.sources,
                },
              ]
            : []),
        ]),
        'completed',
      );

      lastCompleted = true;
    }
  });

  db.exec('DROP TABLE messages;');
  db.exec('ALTER TABLE messages_new RENAME TO messages;');
} else {
  // Execute each statement separately
  statements.forEach((stmt) => {
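The 0002 migration above collapses the old per-row `user`/`source`/`assistant` messages into a single record whose `responseBlocks` column holds an ordered JSON array. A minimal sketch of what one migrated row ends up looking like; all field values here are invented, only the shapes follow the `insertMessage.run(...)` call above:

```ts
// Sketch only: values are hypothetical, shapes mirror the migration's INSERT.
const migratedRow = {
  messageId: 'a1b2c3', // carried over from the old 'user' row
  chatId: 'chat-1',
  backendId: 'a1b2c3-backend', // synthesized by the migration
  query: 'What is Perplexica?', // old user row content
  createdAt: '2025-01-01T00:00:00.000Z',
  responseBlocks: JSON.stringify([
    { id: crypto.randomUUID(), type: 'text', data: '...assistant answer...' },
    { id: crypto.randomUUID(), type: 'source', data: [{ url: 'https://example.com' }] },
  ]),
  status: 'completed',
};
```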
@@ -1,6 +1,7 @@
import { sql } from 'drizzle-orm';
import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core';
import { Block } from '../types';
import { SearchSources } from '../agents/search/types';

export const messages = sqliteTable('messages', {
  id: integer('id').primaryKey(),
@@ -26,7 +27,11 @@ export const chats = sqliteTable('chats', {
  id: text('id').primaryKey(),
  title: text('title').notNull(),
  createdAt: text('createdAt').notNull(),
  focusMode: text('focusMode').notNull(),
  sources: text('sources', {
    mode: 'json',
  })
    .$type<SearchSources[]>()
    .default(sql`'[]'`),
  files: text('files', { mode: 'json' })
    .$type<DBFile[]>()
    .default(sql`'[]'`),
@@ -34,7 +34,7 @@ type ChatContext = {
  chatHistory: [string, string][];
  files: File[];
  fileIds: string[];
  focusMode: string;
  sources: string[];
  chatId: string | undefined;
  optimizationMode: string;
  isMessagesLoaded: boolean;
@@ -48,7 +48,7 @@ type ChatContext = {
  researchEnded: boolean;
  setResearchEnded: (ended: boolean) => void;
  setOptimizationMode: (mode: string) => void;
  setFocusMode: (mode: string) => void;
  setSources: (sources: string[]) => void;
  setFiles: (files: File[]) => void;
  setFileIds: (fileIds: string[]) => void;
  sendMessage: (
@@ -175,8 +175,8 @@ const loadMessages = async (
  chatId: string,
  setMessages: (messages: Message[]) => void,
  setIsMessagesLoaded: (loaded: boolean) => void,
  setChatHistory: (history: [string, string][]) => void,
  setFocusMode: (mode: string) => void,
  chatHistory: React.MutableRefObject<[string, string][]>,
  setSources: (sources: string[]) => void,
  setNotFound: (notFound: boolean) => void,
  setFiles: (files: File[]) => void,
  setFileIds: (fileIds: string[]) => void,
@@ -233,8 +233,8 @@ const loadMessages = async (
  setFiles(files);
  setFileIds(files.map((file: File) => file.fileId));

  setChatHistory(history);
  setFocusMode(data.chat.focusMode);
  chatHistory.current = history;
  setSources(data.chat.sources);
  setIsMessagesLoaded(true);
};

@@ -243,7 +243,7 @@ export const chatContext = createContext<ChatContext>({
  chatId: '',
  fileIds: [],
  files: [],
  focusMode: '',
  sources: [],
  hasError: false,
  isMessagesLoaded: false,
  isReady: false,
@@ -260,7 +260,7 @@ export const chatContext = createContext<ChatContext>({
  sendMessage: async () => {},
  setFileIds: () => {},
  setFiles: () => {},
  setFocusMode: () => {},
  setSources: () => {},
  setOptimizationMode: () => {},
  setChatModelProvider: () => {},
  setEmbeddingModelProvider: () => {},
@@ -269,6 +269,7 @@ export const chatContext = createContext<ChatContext>({

export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
  const params: { chatId: string } = useParams();

  const searchParams = useSearchParams();
  const initialMessage = searchParams.get('q');

@@ -280,13 +281,13 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {

  const [researchEnded, setResearchEnded] = useState(false);

  const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
  const chatHistory = useRef<[string, string][]>([]);
  const [messages, setMessages] = useState<Message[]>([]);

  const [files, setFiles] = useState<File[]>([]);
  const [fileIds, setFileIds] = useState<string[]>([]);

  const [focusMode, setFocusMode] = useState('webSearch');
  const [sources, setSources] = useState<string[]>(['web']);
  const [optimizationMode, setOptimizationMode] = useState('speed');

  const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
@@ -401,6 +402,64 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
    });
  }, [messages]);

  const isReconnectingRef = useRef(false);
  const handledMessageEndRef = useRef<Set<string>>(new Set());

  const checkReconnect = async () => {
    if (isReconnectingRef.current) return;

    setIsReady(true);
    console.debug(new Date(), 'app:ready');

    if (messages.length > 0) {
      const lastMsg = messages[messages.length - 1];

      if (lastMsg.status === 'answering') {
        setLoading(true);
        setResearchEnded(false);
        setMessageAppeared(false);

        isReconnectingRef.current = true;

        const res = await fetch(`/api/reconnect/${lastMsg.backendId}`, {
          method: 'POST',
        });

        if (!res.body) throw new Error('No response body');

        const reader = res.body?.getReader();
        const decoder = new TextDecoder('utf-8');

        let partialChunk = '';

        const messageHandler = getMessageHandler(lastMsg);

        try {
          while (true) {
            const { value, done } = await reader.read();
            if (done) break;

            partialChunk += decoder.decode(value, { stream: true });

            try {
              const messages = partialChunk.split('\n');
              for (const msg of messages) {
                if (!msg.trim()) continue;
                const json = JSON.parse(msg);
                messageHandler(json);
              }
              partialChunk = '';
            } catch (error) {
              console.warn('Incomplete JSON, waiting for next chunk...');
            }
          }
        } finally {
          isReconnectingRef.current = false;
        }
      }
    }
  };

  useEffect(() => {
    checkConfig(
      setChatModelProvider,
@@ -415,7 +474,7 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
    if (params.chatId && params.chatId !== chatId) {
      setChatId(params.chatId);
      setMessages([]);
      setChatHistory([]);
      chatHistory.current = [];
      setFiles([]);
      setFileIds([]);
      setIsMessagesLoaded(false);
@@ -435,8 +494,8 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
      chatId,
      setMessages,
      setIsMessagesLoaded,
      setChatHistory,
      setFocusMode,
      chatHistory,
      setSources,
      setNotFound,
      setFiles,
      setFileIds,
@@ -454,13 +513,15 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
  }, [messages]);

  useEffect(() => {
    if (isMessagesLoaded && isConfigReady) {
    if (isMessagesLoaded && isConfigReady && newChatCreated) {
      setIsReady(true);
      console.debug(new Date(), 'app:ready');
    } else if (isMessagesLoaded && isConfigReady && !newChatCreated) {
      checkReconnect();
    } else {
      setIsReady(false);
    }
  }, [isMessagesLoaded, isConfigReady]);
  }, [isMessagesLoaded, isConfigReady, newChatCreated]);

  const rewrite = (messageId: string) => {
    const index = messages.findIndex((msg) => msg.messageId === messageId);
@@ -469,9 +530,7 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {

    setMessages((prev) => prev.slice(0, index));

    setChatHistory((prev) => {
      return prev.slice(0, index * 2);
    });
    chatHistory.current = chatHistory.current.slice(0, index * 2);

    const messageToRewrite = messages[index];
    sendMessage(messageToRewrite.query, messageToRewrite.messageId, true);
@@ -488,38 +547,10 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isConfigReady, isReady, initialMessage]);

  const sendMessage: ChatContext['sendMessage'] = async (
    message,
    messageId,
    rewrite = false,
  ) => {
    if (loading || !message) return;
    setLoading(true);
    setResearchEnded(false);
    setMessageAppeared(false);
  const getMessageHandler = (message: Message) => {
    const messageId = message.messageId;

    if (messages.length <= 1) {
      window.history.replaceState(null, '', `/c/${chatId}`);
    }

    messageId = messageId ?? crypto.randomBytes(7).toString('hex');
    const backendId = crypto.randomBytes(20).toString('hex');

    const newMessage: Message = {
      messageId,
      chatId: chatId!,
      backendId,
      query: message,
      responseBlocks: [],
      status: 'answering',
      createdAt: new Date(),
    };

    setMessages((prevMessages) => [...prevMessages, newMessage]);

    const receivedTextRef = { current: '' };

    const messageHandler = async (data: any) => {
    return async (data: any) => {
      if (data.type === 'error') {
        toast.error(data.data);
        setLoading(false);
@@ -536,7 +567,7 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
      if (data.type === 'researchComplete') {
        setResearchEnded(true);
        if (
          newMessage.responseBlocks.find(
          message.responseBlocks.find(
            (b) => b.type === 'source' && b.data.length > 0,
          )
        ) {
@@ -548,6 +579,20 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
        setMessages((prev) =>
          prev.map((msg) => {
            if (msg.messageId === messageId) {
              const exists = msg.responseBlocks.findIndex(
                (b) => b.id === data.block.id,
              );

              if (exists !== -1) {
                const existingBlocks = [...msg.responseBlocks];
                existingBlocks[exists] = data.block;

                return {
                  ...msg,
                  responseBlocks: existingBlocks,
                };
              }

              return {
                ...msg,
                responseBlocks: [...msg.responseBlocks, data.block],
@@ -556,6 +601,13 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
            return msg;
          }),
        );

        if (
          (data.block.type === 'source' && data.block.data.length > 0) ||
          data.block.type === 'text'
        ) {
          setMessageAppeared(true);
        }
      }

      if (data.type === 'updateBlock') {
@@ -577,75 +629,28 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
        );
      }

      if (data.type === 'sources') {
        const sourceBlock: Block = {
          id: crypto.randomBytes(7).toString('hex'),
          type: 'source',
          data: data.data,
        };

        setMessages((prev) =>
          prev.map((msg) => {
            if (msg.messageId === messageId) {
              return {
                ...msg,
                responseBlocks: [...msg.responseBlocks, sourceBlock],
              };
            }
            return msg;
          }),
        );
        if (data.data.length > 0) {
          setMessageAppeared(true);
        }
      }

      if (data.type === 'message') {
        receivedTextRef.current += data.data;

        setMessages((prev) =>
          prev.map((msg) => {
            if (msg.messageId === messageId) {
              const existingTextBlockIndex = msg.responseBlocks.findIndex(
                (b) => b.type === 'text',
              );

              if (existingTextBlockIndex >= 0) {
                const updatedBlocks = [...msg.responseBlocks];
                const existingBlock = updatedBlocks[
                  existingTextBlockIndex
                ] as Block & { type: 'text' };
                updatedBlocks[existingTextBlockIndex] = {
                  ...existingBlock,
                  data: existingBlock.data + data.data,
                };
                return { ...msg, responseBlocks: updatedBlocks };
              } else {
                const textBlock: Block = {
                  id: crypto.randomBytes(7).toString('hex'),
                  type: 'text',
                  data: data.data,
                };
                return {
                  ...msg,
                  responseBlocks: [...msg.responseBlocks, textBlock],
                };
              }
            }
            return msg;
          }),
        );
        setMessageAppeared(true);
      }

      if (data.type === 'messageEnd') {
        if (handledMessageEndRef.current.has(messageId)) {
          return;
        }

        handledMessageEndRef.current.add(messageId);

        const currentMsg = messagesRef.current.find(
          (msg) => msg.messageId === messageId,
        );

        const newHistory: [string, string][] = [
          ...chatHistory,
          ['human', message],
          ['assistant', receivedTextRef.current],
          ...chatHistory.current,
          ['human', message.query],
          [
            'assistant',
            currentMsg?.responseBlocks.find((b) => b.type === 'text')?.data ||
              '',
          ],
        ];

        setChatHistory(newHistory);
        chatHistory.current = newHistory;

        setMessages((prev) =>
          prev.map((msg) =>
@@ -662,19 +667,18 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
        const autoMediaSearch = getAutoMediaSearch();

        if (autoMediaSearch) {
          document
            .getElementById(`search-images-${lastMsg.messageId}`)
            ?.click();
          setTimeout(() => {
            document
              .getElementById(`search-images-${lastMsg.messageId}`)
              ?.click();

          document
            .getElementById(`search-videos-${lastMsg.messageId}`)
            ?.click();
            document
              .getElementById(`search-videos-${lastMsg.messageId}`)
              ?.click();
          }, 200);
        }

        // Check if there are sources and no suggestions
        const currentMsg = messagesRef.current.find(
          (msg) => msg.messageId === messageId,
        );

        const hasSourceBlocks = currentMsg?.responseBlocks.some(
          (block) => block.type === 'source' && block.data.length > 0,
@@ -705,6 +709,36 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
        }
      }
    };
  };

  const sendMessage: ChatContext['sendMessage'] = async (
    message,
    messageId,
    rewrite = false,
  ) => {
    if (loading || !message) return;
    setLoading(true);
    setResearchEnded(false);
    setMessageAppeared(false);

    if (messages.length <= 1) {
      window.history.replaceState(null, '', `/c/${chatId}`);
    }

    messageId = messageId ?? crypto.randomBytes(7).toString('hex');
    const backendId = crypto.randomBytes(20).toString('hex');

    const newMessage: Message = {
      messageId,
      chatId: chatId!,
      backendId,
      query: message,
      responseBlocks: [],
      status: 'answering',
      createdAt: new Date(),
    };

    setMessages((prevMessages) => [...prevMessages, newMessage]);

    const messageIndex = messages.findIndex((m) => m.messageId === messageId);

@@ -722,11 +756,14 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
      },
      chatId: chatId!,
      files: fileIds,
      focusMode: focusMode,
      sources: sources,
      optimizationMode: optimizationMode,
      history: rewrite
        ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
        : chatHistory,
        ? chatHistory.current.slice(
            0,
            messageIndex === -1 ? undefined : messageIndex,
          )
        : chatHistory.current,
      chatModel: {
        key: chatModelProvider.key,
        providerId: chatModelProvider.providerId,
@@ -746,6 +783,8 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {

    let partialChunk = '';

    const messageHandler = getMessageHandler(newMessage);

    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
@@ -771,10 +810,10 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
      value={{
        messages,
        sections,
        chatHistory,
        chatHistory: chatHistory.current,
        files,
        fileIds,
        focusMode,
        sources,
        chatId,
        hasError,
        isMessagesLoaded,
@@ -785,7 +824,7 @@ export const ChatProvider = ({ children }: { children: React.ReactNode }) => {
        optimizationMode,
        setFileIds,
        setFiles,
        setFocusMode,
        setSources,
        setOptimizationMode,
        rewrite,
        sendMessage,
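Both the `/api/chat` and `/api/reconnect` readers above parse a newline-delimited JSON stream, buffering partial chunks until a full line arrives. A slightly more defensive sketch of the same loop, which keeps only the incomplete tail instead of re-parsing the whole buffer; the event shape is whatever the server emits (`addBlock`, `updateBlock`, `messageEnd`, and so on):

```ts
// Minimal sketch, not the app's exact code: read an NDJSON body and hand
// each complete line to a handler, buffering the unfinished tail.
async function readNdjson(res: Response, onEvent: (event: any) => void) {
  if (!res.body) throw new Error('No response body');
  const reader = res.body.getReader();
  const decoder = new TextDecoder('utf-8');
  let buffer = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // possibly incomplete JSON stays buffered
    for (const line of lines) {
      if (line.trim()) onEvent(JSON.parse(line));
    }
  }
}
```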
src/lib/models/providers/anthropic/anthropicLLM.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class AnthropicLLM extends OpenAILLM {}

export default AnthropicLLM;
src/lib/models/providers/anthropic/index.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import AnthropicLLM from './anthropicLLM';

interface AnthropicConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Anthropic API key',
    required: true,
    placeholder: 'Anthropic API Key',
    env: 'ANTHROPIC_API_KEY',
    scope: 'server',
  },
];

class AnthropicProvider extends BaseModelProvider<AnthropicConfig> {
  constructor(id: string, name: string, config: AnthropicConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch('https://api.anthropic.com/v1/models?limit=999', {
      method: 'GET',
      headers: {
        'x-api-key': this.config.apiKey,
        'anthropic-version': '2023-06-01',
        'Content-type': 'application/json',
      },
    });

    if (!res.ok) {
      throw new Error(`Failed to fetch Anthropic models: ${res.statusText}`);
    }

    const data = (await res.json()).data;

    const models: Model[] = data.map((m: any) => {
      return {
        key: m.id,
        name: m.display_name,
      };
    });

    return {
      embedding: [],
      chat: models,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Anthropic Chat Model. Invalid Model Selected',
      );
    }

    return new AnthropicLLM({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://api.anthropic.com/v1',
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    throw new Error('Anthropic provider does not support embedding models.');
  }

  static parseAndValidate(raw: any): AnthropicConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'anthropic',
      name: 'Anthropic',
    };
  }
}

export default AnthropicProvider;
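A hedged usage sketch of the provider lifecycle these BaseModelProvider subclasses share; the id, name, and environment lookup below are placeholders, not values from this diff:

```ts
// Sketch under assumptions: ANTHROPIC_API_KEY is set, and the provider id is
// registered in the server config (getModelList() consults that registry).
const config = AnthropicProvider.parseAndValidate({
  apiKey: process.env.ANTHROPIC_API_KEY,
});
const provider = new AnthropicProvider('anthropic-default', 'Anthropic', config);

const { chat } = await provider.getModelList();
const llm = await provider.loadChatModel(chat[0].key); // throws on unknown keys
```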
src/lib/models/providers/gemini/geminiEmbedding.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAIEmbedding from '../openai/openaiEmbedding';

class GeminiEmbedding extends OpenAIEmbedding {}

export default GeminiEmbedding;
src/lib/models/providers/gemini/geminiLLM.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class GeminiLLM extends OpenAILLM {}

export default GeminiLLM;
src/lib/models/providers/gemini/index.ts (new file, 144 lines)
@@ -0,0 +1,144 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import GeminiEmbedding from './geminiEmbedding';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import GeminiLLM from './geminiLLM';

interface GeminiConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Gemini API key',
    required: true,
    placeholder: 'Gemini API Key',
    env: 'GEMINI_API_KEY',
    scope: 'server',
  },
];

class GeminiProvider extends BaseModelProvider<GeminiConfig> {
  constructor(id: string, name: string, config: GeminiConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch(
      `https://generativelanguage.googleapis.com/v1beta/models?key=${this.config.apiKey}`,
      {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      },
    );

    const data = await res.json();

    let defaultEmbeddingModels: Model[] = [];
    let defaultChatModels: Model[] = [];

    data.models.forEach((m: any) => {
      if (
        m.supportedGenerationMethods.some(
          (genMethod: string) =>
            genMethod === 'embedText' || genMethod === 'embedContent',
        )
      ) {
        defaultEmbeddingModels.push({
          key: m.name,
          name: m.displayName,
        });
      } else if (m.supportedGenerationMethods.includes('generateContent')) {
        defaultChatModels.push({
          key: m.name,
          name: m.displayName,
        });
      }
    });

    return {
      embedding: defaultEmbeddingModels,
      chat: defaultChatModels,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Chat Model. Invalid Model Selected',
      );
    }

    return new GeminiLLM({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai',
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Embedding Model. Invalid Model Selected.',
      );
    }

    return new GeminiEmbedding({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai',
    });
  }

  static parseAndValidate(raw: any): GeminiConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'gemini',
      name: 'Gemini',
    };
  }
}

export default GeminiProvider;
src/lib/models/providers/groq/groqLLM.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class GroqLLM extends OpenAILLM {}

export default GroqLLM;
src/lib/models/providers/groq/index.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import GroqLLM from './groqLLM';

interface GroqConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Groq API key',
    required: true,
    placeholder: 'Groq API Key',
    env: 'GROQ_API_KEY',
    scope: 'server',
  },
];

class GroqProvider extends BaseModelProvider<GroqConfig> {
  constructor(id: string, name: string, config: GroqConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch(`https://api.groq.com/openai/v1/models`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${this.config.apiKey}`,
      },
    });

    const data = await res.json();

    const defaultChatModels: Model[] = [];

    data.data.forEach((m: any) => {
      defaultChatModels.push({
        key: m.id,
        name: m.id,
      });
    });

    return {
      embedding: [],
      chat: defaultChatModels,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error('Error Loading Groq Chat Model. Invalid Model Selected');
    }

    return new GroqLLM({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: 'https://api.groq.com/openai/v1',
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    throw new Error('Groq Provider does not support embedding models.');
  }

  static parseAndValidate(raw: any): GroqConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'groq',
      name: 'Groq',
    };
  }
}

export default GroqProvider;
@@ -2,10 +2,22 @@ import { ModelProviderUISection } from '@/lib/config/types';
import { ProviderConstructor } from '../base/provider';
import OpenAIProvider from './openai';
import OllamaProvider from './ollama';
import GeminiProvider from './gemini';
import TransformersProvider from './transformers';
import GroqProvider from './groq';
import LemonadeProvider from './lemonade';
import AnthropicProvider from './anthropic';
import LMStudioProvider from './lmstudio';

export const providers: Record<string, ProviderConstructor<any>> = {
  openai: OpenAIProvider,
  ollama: OllamaProvider,
  gemini: GeminiProvider,
  transformers: TransformersProvider,
  groq: GroqProvider,
  lemonade: LemonadeProvider,
  anthropic: AnthropicProvider,
  lmstudio: LMStudioProvider,
};

export const getModelProvidersUIConfigSection =
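The registry above maps a string key to a provider constructor. A sketch of how a caller might resolve and instantiate one; the id, display name, and config are placeholders, and the constructor signature is assumed from the provider classes in this diff:

```ts
// Hypothetical lookup; 'lmstudio' is one of the keys registered above.
const ProviderCtor = providers['lmstudio'];
if (!ProviderCtor) throw new Error('Unknown provider key');

const config = ProviderCtor.parseAndValidate({ baseURL: 'http://localhost:1234' });
const provider = new ProviderCtor('lmstudio-default', 'LM Studio', config);
```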
src/lib/models/providers/lemonade/index.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import BaseModelProvider from '../../base/provider';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseLLM from '../../base/llm';
import LemonadeLLM from './lemonadeLLM';
import BaseEmbedding from '../../base/embedding';
import LemonadeEmbedding from './lemonadeEmbedding';

interface LemonadeConfig {
  baseURL: string;
  apiKey?: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for Lemonade API',
    required: true,
    placeholder: 'https://api.lemonade.ai/v1',
    env: 'LEMONADE_BASE_URL',
    scope: 'server',
  },
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Lemonade API key (optional)',
    required: false,
    placeholder: 'Lemonade API Key',
    env: 'LEMONADE_API_KEY',
    scope: 'server',
  },
];

class LemonadeProvider extends BaseModelProvider<LemonadeConfig> {
  constructor(id: string, name: string, config: LemonadeConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const res = await fetch(`${this.config.baseURL}/models`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
          ...(this.config.apiKey
            ? { Authorization: `Bearer ${this.config.apiKey}` }
            : {}),
        },
      });

      const data = await res.json();

      const models: Model[] = data.data
        .filter((m: any) => m.recipe === 'llamacpp')
        .map((m: any) => {
          return {
            name: m.id,
            key: m.id,
          };
        });

      return {
        embedding: models,
        chat: models,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to Lemonade API. Please ensure the base URL is correct and the service is available.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Lemonade Chat Model. Invalid Model Selected',
      );
    }

    return new LemonadeLLM({
      apiKey: this.config.apiKey || 'not-needed',
      model: key,
      baseURL: this.config.baseURL,
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Lemonade Embedding Model. Invalid Model Selected.',
      );
    }

    return new LemonadeEmbedding({
      apiKey: this.config.apiKey || 'not-needed',
      model: key,
      baseURL: this.config.baseURL,
    });
  }

  static parseAndValidate(raw: any): LemonadeConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.baseURL)
      throw new Error('Invalid config provided. Base URL must be provided');

    return {
      baseURL: String(raw.baseURL),
      apiKey: raw.apiKey ? String(raw.apiKey) : undefined,
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'lemonade',
      name: 'Lemonade',
    };
  }
}

export default LemonadeProvider;
src/lib/models/providers/lemonade/lemonadeEmbedding.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAIEmbedding from '../openai/openaiEmbedding';

class LemonadeEmbedding extends OpenAIEmbedding {}

export default LemonadeEmbedding;
src/lib/models/providers/lemonade/lemonadeLLM.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class LemonadeLLM extends OpenAILLM {}

export default LemonadeLLM;
src/lib/models/providers/lmstudio/index.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import BaseModelProvider from '../../base/provider';
import { Model, ModelList, ProviderMetadata } from '../../types';
import LMStudioLLM from './lmstudioLLM';
import BaseLLM from '../../base/llm';
import BaseEmbedding from '../../base/embedding';
import LMStudioEmbedding from './lmstudioEmbedding';

interface LMStudioConfig {
  baseURL: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for LM Studio server',
    required: true,
    placeholder: 'http://localhost:1234',
    env: 'LM_STUDIO_BASE_URL',
    scope: 'server',
  },
];

class LMStudioProvider extends BaseModelProvider<LMStudioConfig> {
  constructor(id: string, name: string, config: LMStudioConfig) {
    super(id, name, config);
  }

  private normalizeBaseURL(url: string): string {
    const trimmed = url.trim().replace(/\/+$/, '');
    return trimmed.endsWith('/v1') ? trimmed : `${trimmed}/v1`;
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const baseURL = this.normalizeBaseURL(this.config.baseURL);

      const res = await fetch(`${baseURL}/models`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      });

      const data = await res.json();

      const models: Model[] = data.data.map((m: any) => {
        return {
          name: m.id,
          key: m.id,
        };
      });

      return {
        embedding: models,
        chat: models,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to LM Studio. Please ensure the base URL is correct and the LM Studio server is running.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading LM Studio Chat Model. Invalid Model Selected',
      );
    }

    return new LMStudioLLM({
      apiKey: 'lm-studio',
      model: key,
      baseURL: this.normalizeBaseURL(this.config.baseURL),
    });
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading LM Studio Embedding Model. Invalid Model Selected.',
      );
    }

    return new LMStudioEmbedding({
      apiKey: 'lm-studio',
      model: key,
      baseURL: this.normalizeBaseURL(this.config.baseURL),
    });
  }

  static parseAndValidate(raw: any): LMStudioConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.baseURL)
      throw new Error('Invalid config provided. Base URL must be provided');

    return {
      baseURL: String(raw.baseURL),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'lmstudio',
      name: 'LM Studio',
    };
  }
}

export default LMStudioProvider;
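The normalizeBaseURL helper above tolerates trailing slashes and an already-present `/v1` suffix. A standalone copy for illustration, with the expected mappings:

```ts
// Behavior mirrors the private method in the class above.
const normalizeBaseURL = (url: string): string => {
  const trimmed = url.trim().replace(/\/+$/, '');
  return trimmed.endsWith('/v1') ? trimmed : `${trimmed}/v1`;
};

normalizeBaseURL('http://localhost:1234');    // 'http://localhost:1234/v1'
normalizeBaseURL('http://localhost:1234/');   // 'http://localhost:1234/v1'
normalizeBaseURL('http://localhost:1234/v1'); // 'http://localhost:1234/v1'
```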
src/lib/models/providers/lmstudio/lmstudioEmbedding.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAIEmbedding from '../openai/openaiEmbedding';

class LMStudioEmbedding extends OpenAIEmbedding {}

export default LMStudioEmbedding;
src/lib/models/providers/lmstudio/lmstudioLLM.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

class LMStudioLLM extends OpenAILLM {}

export default LMStudioLLM;
@@ -11,6 +11,7 @@ import { Ollama, Tool as OllamaTool, Message as OllamaMessage } from 'ollama';
import { parse } from 'partial-json';
import crypto from 'crypto';
import { Message } from '@/lib/types';
import { repairJson } from '@toolsycc/json-repair';

type OllamaConfig = {
  baseURL: string;
@@ -24,6 +25,7 @@ const reasoningModels = [
  'qwen3',
  'deepseek-v3.1',
  'magistral',
  'nemotron-3-nano',
];

class OllamaLLM extends BaseLLM<OllamaConfig> {
@@ -161,8 +163,13 @@ class OllamaLLM extends BaseLLM<OllamaConfig> {
      yield {
        contentChunk: chunk.message.content,
        toolCallChunk:
          chunk.message.tool_calls?.map((tc) => ({
            id: crypto.randomUUID(),
          chunk.message.tool_calls?.map((tc, i) => ({
            id: crypto
              .createHash('sha256')
              .update(
                `${i}-${tc.function.name}`,
              ) /* Ollama currently doesn't return a tool call ID so we're creating one based on the index and tool call name */
              .digest('hex'),
            name: tc.function.name,
            arguments: tc.function.arguments,
          })) || [],
@@ -199,7 +206,13 @@ class OllamaLLM extends BaseLLM<OllamaConfig> {
    });

    try {
      return input.schema.parse(JSON.parse(response.message.content)) as T;
      return input.schema.parse(
        JSON.parse(
          repairJson(response.message.content, {
            extractJson: true,
          }) as string,
        ),
      ) as T;
    } catch (err) {
      throw new Error(`Error parsing response from Ollama: ${err}`);
    }
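Since Ollama does not return tool-call IDs, the hunk above derives one deterministically from the call's index and function name. Isolated for clarity; note that two calls with the same index and name would collide, which the inline comment in the diff accepts:

```ts
import crypto from 'crypto';

// Same scheme as the diff above: sha256 over "<index>-<name>".
const toolCallId = (index: number, name: string): string =>
  crypto.createHash('sha256').update(`${index}-${name}`).digest('hex');

toolCallId(0, 'web_search'); // stable hex ID for the first web_search call
```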
@@ -61,6 +61,22 @@ const defaultChatModels: Model[] = [
    name: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    name: 'GPT 5 Pro',
    key: 'gpt-5-pro',
  },
  {
    name: 'GPT 5.1',
    key: 'gpt-5.1',
  },
  {
    name: 'GPT 5.2',
    key: 'gpt-5.2',
  },
  {
    name: 'GPT 5.2 Pro',
    key: 'gpt-5.2-pro',
  },
  {
    name: 'o1',
    key: 'o1',
@@ -18,6 +18,7 @@ import {
  ChatCompletionToolMessageParam,
} from 'openai/resources/index.mjs';
import { Message } from '@/lib/types';
import { repairJson } from '@toolsycc/json-repair';

type OpenAIConfig = {
  apiKey: string;
@@ -167,7 +168,7 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {
        contentChunk: chunk.choices[0].delta.content || '',
        toolCallChunk:
          toolCalls?.map((tc) => {
            if (tc.type === 'function') {
              if (!recievedToolCalls[tc.index]) {
                const call = {
                  name: tc.function?.name!,
                  id: tc.id!,
@@ -213,7 +214,13 @@ class OpenAILLM extends BaseLLM<OpenAIConfig> {

    if (response.choices && response.choices.length > 0) {
      try {
        return input.schema.parse(response.choices[0].message.parsed) as T;
        return input.schema.parse(
          JSON.parse(
            repairJson(response.choices[0].message.content!, {
              extractJson: true,
            }) as string,
          ),
        ) as T;
      } catch (err) {
        throw new Error(`Error parsing response from OpenAI: ${err}`);
      }
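Both LLM classes now route structured output through repairJson before schema validation. A sketch of that path in isolation; the schema and raw model text are illustrative, and zod is assumed only because the schema objects in this diff expose a `parse` method:

```ts
import { z } from 'zod';
import { repairJson } from '@toolsycc/json-repair';

// extractJson pulls the JSON object out of surrounding prose; repairJson
// then repairs malformed output such as the unterminated object below.
const schema = z.object({ query: z.string() });
const raw = 'Sure! {"query": "Kimi K2"';

const parsed = schema.parse(
  JSON.parse(repairJson(raw, { extractJson: true }) as string),
);
// parsed.query === 'Kimi K2'
```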
src/lib/models/providers/transformers/index.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import BaseEmbedding from '../../base/embedding';
import TransformerEmbedding from './transformerEmbedding';

interface TransformersConfig {}

const defaultEmbeddingModels: Model[] = [
  {
    name: 'all-MiniLM-L6-v2',
    key: 'Xenova/all-MiniLM-L6-v2',
  },
  {
    name: 'mxbai-embed-large-v1',
    key: 'mixedbread-ai/mxbai-embed-large-v1',
  },
  {
    name: 'nomic-embed-text-v1',
    key: 'Xenova/nomic-embed-text-v1',
  },
];

const providerConfigFields: UIConfigField[] = [];

class TransformersProvider extends BaseModelProvider<TransformersConfig> {
  constructor(id: string, name: string, config: TransformersConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    return {
      embedding: [...defaultEmbeddingModels],
      chat: [],
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [],
    };
  }

  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    throw new Error('Transformers Provider does not support chat models.');
  }

  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Transformers Embedding Model. Invalid Model Selected.',
      );
    }

    return new TransformerEmbedding({
      model: key,
    });
  }

  static parseAndValidate(raw: any): TransformersConfig {
    return {};
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'transformers',
      name: 'Transformers',
    };
  }
}

export default TransformersProvider;
src/lib/models/providers/transformers/transformerEmbedding.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { Chunk } from '@/lib/types';
import BaseEmbedding from '../../base/embedding';
import { FeatureExtractionPipeline } from '@huggingface/transformers';

type TransformerConfig = {
  model: string;
};

class TransformerEmbedding extends BaseEmbedding<TransformerConfig> {
  private pipelinePromise: Promise<FeatureExtractionPipeline> | null = null;

  constructor(protected config: TransformerConfig) {
    super(config);
  }

  async embedText(texts: string[]): Promise<number[][]> {
    return this.embed(texts);
  }

  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    return this.embed(chunks.map((c) => c.content));
  }

  private async embed(texts: string[]) {
    if (!this.pipelinePromise) {
      this.pipelinePromise = (async () => {
        const { pipeline } = await import('@huggingface/transformers');
        const result = await pipeline('feature-extraction', this.config.model, {
          dtype: 'fp32',
        });
        return result as FeatureExtractionPipeline;
      })();
    }

    const pipe = await this.pipelinePromise;
    const output = await pipe(texts, { pooling: 'mean', normalize: true });
    return output.tolist() as number[][];
  }
}

export default TransformerEmbedding;
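Usage sketch for the embedding class above; the pipeline loads lazily on the first call and is reused afterwards. The model key must be one the Transformers provider registers, e.g. Xenova/all-MiniLM-L6-v2:

```ts
// Sketch: top-level await assumed for brevity.
const embedding = new TransformerEmbedding({ model: 'Xenova/all-MiniLM-L6-v2' });

const vectors = await embedding.embedText(['hello world', 'perplexica']);
console.log(vectors.length, vectors[0].length); // 2 vectors, model-sized dims
```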
@@ -3,6 +3,7 @@ import { ChatTurnMessage } from '@/lib/types';
export const imageSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Make sure to make the query standalone and not something very broad; use context from the answers in the conversation to make it specific so the user gets the best image search results.
Output only the rephrased query in query key JSON format. Do not include any explanation or additional text.
`;
@@ -3,6 +3,7 @@ import { ChatTurnMessage } from '@/lib/types';
export const videoSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Make sure to make the query standalone and not something very broad; use context from the answers in the conversation to make it specific so the user gets the best video search results.
Output only the rephrased query in query key JSON format. Do not include any explanation or additional text.
`;
@@ -55,7 +55,8 @@ You must respond in the following JSON format without any extra text, explanatio
  "academicSearch": boolean,
  "discussionSearch": boolean,
  "showWeatherWidget": boolean,
  "showStockWidget": boolean
  "showStockWidget": boolean,
  "showCalculationWidget": boolean,
},
"standaloneFollowUp": string
}
@@ -1,8 +1,11 @@
|
||||
export const getResearcherPrompt = (
|
||||
import BaseEmbedding from '@/lib/models/base/embedding';
|
||||
import UploadStore from '@/lib/uploads/store';
|
||||
|
||||
const getSpeedPrompt = (
|
||||
actionDesc: string,
|
||||
mode: 'speed' | 'balanced' | 'quality',
|
||||
i: number,
|
||||
maxIteration: number,
|
||||
fileDesc: string,
|
||||
) => {
|
||||
const today = new Date().toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
@@ -11,235 +14,341 @@ export const getResearcherPrompt = (
|
||||
});
|
||||
|
||||
return `
|
||||
You are an action orchestrator. Your job is to fulfill user requests by selecting and executing appropriate actions - whether that's searching for information, creating calendar events, sending emails, or any other available action.
|
||||
You will be shared with the conversation history between user and AI, along with the user's latest follow-up question and your previous actions' results (if any. Note that they're per conversation so if they contain any previous actions it was executed for the last follow up (the one you're currently handling)). Based on this, you must decide the best next action(s) to take to fulfill the user's request.
|
||||
Assistant is an action orchestrator. Your job is to fulfill user requests by selecting and executing the available tools—no free-form replies.
|
||||
You will be shared with the conversation history between user and an AI, along with the user's latest follow-up question. Based on this, you must use the available tools to fulfill the user's request.
|
||||
|
||||
Today's date: ${today}
|
||||
Today's date: ${today}
|
||||
|
||||
You are operating in "${mode}" mode. ${
|
||||
mode === 'speed'
|
||||
? 'Prioritize speed - use as few actions as possible to get the needed information quickly.'
|
||||
: mode === 'balanced'
|
||||
? 'Balance speed and depth - use a moderate number of actions to get good information efficiently. Never stop at the first action unless there is no action available or the query is simple.'
|
||||
: 'Conduct deep research - use multiple actions to gather comprehensive information, even if it takes longer.'
|
||||
}
|
||||
You are currently on iteration ${i + 1} of your research process and have ${maxIteration} total iterations so act efficiently.
|
||||
When you are finished, you must call the \`done\` tool. Never output text directly.
|
||||
|
||||
You are currently on iteration ${i + 1} of your research process and have ${maxIteration} total iterations so please take action accordingly. After max iterations, the done action would get called automatically so you don't have to worry about that unless you want to end the research early.
|
||||
<goal>
|
||||
Fulfill the user's request as quickly as possible using the available tools.
|
||||
Call tools to gather information or perform tasks as needed.
|
||||
</goal>
|
||||
|
||||
<available_actions>
|
||||
${actionDesc}
|
||||
</available_actions>
|
||||
<core_principle>
|
||||
Your knowledge is outdated; if you have web search, use it to ground answers even for seemingly basic facts.
|
||||
</core_principle>
|
||||
|
||||
<core_principle>
|
||||
<examples>
|
||||
|
||||
NEVER ASSUME - your knowledge may be outdated. When a user asks about something you're not certain about, go find out. Don't assume it exists or doesn't exist - just look it up directly.
|
||||
## Example 1: Unknown Subject
|
||||
User: "What is Kimi K2?"
|
||||
Action: web_search ["Kimi K2", "Kimi K2 AI"] then done.
|
||||
|
||||
</core_principle>
|
||||
## Example 2: Subject You're Uncertain About
|
||||
User: "What are the features of GPT-5.1?"
|
||||
Action: web_search ["GPT-5.1", "GPT-5.1 features", "GPT-5.1 release"] then done.
|
||||
|
||||
<reasoning_approach>
|
||||
You never speak your reasoning to the user. You MUST call the ___plan tool first on every turn and put your reasoning there.
|
||||
The plan must be 2-4 concise sentences, starting with "Okay, the user wants to..." and outlining the steps you will take next.
|
||||
</reasoning_approach>
|
||||
## Example 3: After Tool calls Return Results
|
||||
User: "What are the features of GPT-5.1?"
|
||||
[Previous tool calls returned the needed info]
|
||||
Action: done.
|
||||
|
||||
<examples>
|
||||
</examples>
|
||||
|
||||
## Example 1: Unknown Subject
|
||||
<available_tools>
|
||||
${actionDesc}
|
||||
</available_tools>
|
||||
|
||||
User: "What is Kimi K2?"
|
||||
|
||||
Good reasoning:
|
||||
"I'm not sure what Kimi K2 is - could be an AI model, a product, or something else. Let me look it up to find out what it actually is and get the relevant details."
|
||||
|
||||
Actions: web_search ["Kimi K2", "Kimi K2 AI"]
|
||||
|
||||
## Example 2: Subject You're Uncertain About
|
||||
|
||||
User: "What are the features of GPT-5.1?"
|
||||
|
||||
Good reasoning:
|
||||
"I don't have current information on GPT-5.1 - my knowledge might be outdated. Let me look up GPT-5.1 to see what's available and what features it has."
|
||||
|
||||
Actions: web_search ["GPT-5.1", "GPT-5.1 features", "GPT-5.1 release"]
|
||||
|
||||
Bad reasoning (wastes time on verification):
|
||||
"GPT-5.1 might not exist based on my knowledge. I need to verify if it exists first before looking for features."
|
||||
|
||||
## Example 3: After Actions Return Results
|
||||
|
||||
User: "What are the features of GPT-5.1?"
|
||||
[Previous actions returned information about GPT-5.1]
|
||||
|
||||
Good reasoning:
|
||||
"Got the information I needed about GPT-5.1. The results cover its features and capabilities - I can now provide a complete answer."
|
||||
|
||||
Action: done
|
||||
|
||||
## Example 4: Ambiguous Query
|
||||
|
||||
User: "Tell me about Mercury"
|
||||
|
||||
Good reasoning:
|
||||
"Mercury could refer to several things - the planet, the element, or something else. I'll look up both main interpretations to give a useful answer."
|
||||
|
||||
Actions: web_search ["Mercury planet facts", "Mercury element"]
|
||||
|
||||
## Example 5: Current Events
|
||||
|
||||
User: "What's happening with AI regulation?"
|
||||
|
||||
Good reasoning:
|
||||
"I need current news on AI regulation developments. Let me find the latest updates on this topic."
|
||||
|
||||
Actions: web_search ["AI regulation news 2024", "AI regulation bill latest"]
|
||||
|
||||
## Example 6: Technical Query
|
||||
|
||||
User: "How do I set up authentication in Next.js 14?"
|
||||
|
||||
Good reasoning:
|
||||
"This is a technical implementation question. I'll find the current best practices and documentation for Next.js 14 authentication."
|
||||
|
||||
Actions: web_search ["Next.js 14 authentication guide", "NextAuth.js App Router"]
|
||||
|
||||
## Example 7: Comparison Query
|
||||
|
||||
User: "Prisma vs Drizzle - which should I use?"
|
||||
|
||||
Good reasoning:
|
||||
"Need to find factual comparisons between these ORMs - performance, features, trade-offs. Let me gather objective information."
|
||||
|
||||
Actions: web_search ["Prisma vs Drizzle comparison 2024", "Drizzle ORM performance"]
|
||||
|
||||
## Example 8: Fact-Check
|
||||
|
||||
User: "Is it true you only use 10% of your brain?"
|
||||
|
||||
Good reasoning:
|
||||
"This is a common claim that needs scientific verification. Let me find what the actual research says about this."
|
||||
|
||||
Actions: web_search ["10 percent brain myth science", "brain usage neuroscience"]
|
||||
|
||||
## Example 9: Recent Product
|
||||
|
||||
User: "What are the specs of MacBook Pro M4?"
|
||||
|
||||
Good reasoning:
|
||||
"I need current information on the MacBook Pro M4. Let me look up the latest specs and details."
|
||||
|
||||
Actions: web_search ["MacBook Pro M4 specs", "MacBook Pro M4 specifications Apple"]
|
||||
|
||||
## Example 10: Multi-Part Query
|
||||
|
||||
User: "Population of Tokyo vs New York?"
|
||||
|
||||
Good reasoning:
|
||||
"Need current population stats for both cities. I'll look up the comparison data."
|
||||
|
||||
Actions: web_search ["Tokyo population 2024", "Tokyo vs New York population"]
|
||||
|
||||
## Example 11: Calendar Task
|
||||
|
||||
User: "Add a meeting with John tomorrow at 3pm"
|
||||
|
||||
Good reasoning:
|
||||
"This is a calendar task. I have all the details - meeting with John, tomorrow, 3pm. I'll create the event."
|
||||
|
||||
Action: create_calendar_event with the provided details
|
||||
|
||||
## Example 12: Email Task
|
||||
|
||||
User: "Send an email to sarah@company.com about the project update"
|
||||
|
||||
Good reasoning:
|
||||
"Need to send an email. I have the recipient but need to compose appropriate content about the project update."
|
||||
|
||||
Action: send_email to sarah@company.com with project update content
|
||||
|
||||
## Example 13: Multi-Step Task
|
||||
|
||||
User: "What's the weather in Tokyo and add a reminder to pack an umbrella if it's rainy"
|
||||
|
||||
Good reasoning:
|
||||
"Two things here - first I need to check Tokyo's weather, then based on that I might need to create a reminder. Let me start with the weather lookup."
|
||||
|
||||
Actions: web_search ["Tokyo weather today forecast"]
|
||||
|
||||
## Example 14: Research Then Act
|
||||
|
||||
User: "Find the best Italian restaurant near me and make a reservation for 7pm"
|
||||
|
||||
Good reasoning:
|
||||
"I need to first find top Italian restaurants in the area, then make a reservation. Let me start by finding the options."
|
||||
|
||||
Actions: web_search ["best Italian restaurant near me", "top rated Italian restaurants"]
|
||||
|
||||
</examples>
|
||||
|
||||
<action_guidelines>

## For Information Queries:
- Just look it up - don't overthink whether something exists
- Use 1-3 targeted queries
- Done when you have useful information to answer with

## For Task Execution:
- Calendar, email, reminders: execute directly with the provided details
- If details are missing, note what you need

## For Multi-Step Requests:
- Break it down logically
- Complete one part before moving to the next
- Some tasks require information before you can act

## When to Select "done":
- You have the information needed to answer
- You've completed the requested task
- Further actions would be redundant

</action_guidelines>

<query_formulation>

**General subjects:**
- ["subject name", "subject name + context"]

**Current events:**
- Include year: "topic 2024", "topic latest news"

**Technical topics:**
- Include versions: "framework v14 guide"
- Add context: "documentation", "tutorial", "how to"

**Comparisons:**
- "X vs Y comparison", "X vs Y benchmarks"

**Keep it simple:**
- 1-3 actions per iteration
- Don't over-complicate queries

</query_formulation>

<mistakes_to_avoid>

1. **Over-assuming**: Don't assume things exist or don't exist - just look them up

2. **Verification obsession**: Don't waste tool calls "verifying existence" - just search for the thing directly

3. **Endless loops**: If 2-3 tool calls don't find something, it probably doesn't exist - report that and move on

4. **Ignoring task context**: If user wants a calendar event, don't just search - create the event

5. **Overthinking**: Keep reasoning simple and tool calls focused

</mistakes_to_avoid>

<response_protocol>
- NEVER output normal text to the user. ONLY call tools.
- Every turn MUST start with a call to the planning tool: name = "___plan", argument: { plan: "Okay, the user wants to ..." + concise 2-4 sentence plan }.
- Immediately after ___plan, if any information is missing, call \`web_search\` with up to 3 targeted queries. Default to searching unless you are certain you have enough.
- Choose the appropriate tools based on the action descriptions provided above.
- Call \`done\` only after planning and any required searches, once you have gathered enough to answer or performed the required actions. If nothing else is needed after planning, call \`done\` immediately after the plan.
- Do not invent tools. Do not return JSON. Do not echo the plan outside of the tool call.
</response_protocol>

${
  fileDesc.length > 0
    ? `<user_uploaded_files>
The user has uploaded the following files which may be relevant to their request:
${fileDesc}
You can use the uploaded files search tool to look for information within these documents if needed.
</user_uploaded_files>`
    : ''
}
`;
};

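Read end to end, this protocol fixes the shape of a valid speed-mode turn. As a rough sketch (the argument shapes here are illustrative, not the exact tool-call wire format), a conforming turn looks like:

```ts
// Illustrative only: a conforming speed-mode turn is tool calls, never text.
const exampleTurn = [
  {
    tool: '___plan',
    args: {
      plan: 'Okay, the user wants to know what Kimi K2 is. I will search for it and summarize what I find.',
    },
  },
  { tool: 'web_search', args: { queries: ['Kimi K2', 'Kimi K2 AI'] } },
  { tool: 'done', args: {} },
];
```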
const getBalancedPrompt = (
  actionDesc: string,
  i: number,
  maxIteration: number,
  fileDesc: string,
) => {
  const today = new Date().toLocaleDateString('en-US', {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
  });

  return `
Assistant is an action orchestrator. Your job is to fulfill user requests by reasoning briefly and executing the available tools—no free-form replies.
You will be given the conversation history between the user and an AI, along with the user's latest follow-up question. Based on this, you must use the available tools to fulfill the user's request.

Today's date: ${today}

You are currently on iteration ${i + 1} of your research process and have ${maxIteration} total iterations, so act efficiently.
When you are finished, you must call the \`done\` tool. Never output text directly.

<goal>
Fulfill the user's request with concise reasoning plus focused actions.
You must call the __reasoning_preamble tool before every tool call in this assistant turn. Alternate: __reasoning_preamble → tool → __reasoning_preamble → tool ... and finish with __reasoning_preamble → done. Open each __reasoning_preamble with a brief intent phrase (e.g., "Okay, the user wants to...", "Searching for...", "Looking into...") and lay out your reasoning for the next step. Keep it natural language, no tool names.
</goal>

<core_principle>
Your knowledge is outdated; if you have web search, use it to ground answers even for seemingly basic facts.
You can call at most 6 tools total per turn: up to 2 reasoning calls (__reasoning_preamble counts as reasoning), 2-3 information-gathering calls, and 1 done. If you hit the cap, stop after done.
Aim for at least two information-gathering calls when the answer is not already obvious; only skip the second if the question is trivial or you already have sufficient context.
Do not spam searches—pick the most targeted queries.
</core_principle>

<done_usage>
Call done only after the reasoning plus the necessary tool calls are completed and you have enough to answer. If you call done early, stop. If you reach the tool cap, call done to conclude.
</done_usage>

<examples>

## Example 1: Unknown Subject
User: "What is Kimi K2?"
Reason: "Okay, the user wants to know about Kimi K2. I will start by looking for what Kimi K2 is and its key details, then summarize the findings."
Action: web_search ["Kimi K2", "Kimi K2 AI"] then reasoning then done.

## Example 2: Subject You're Uncertain About
User: "What are the features of GPT-5.1?"
Reason: "The user is asking about GPT-5.1 features. I will search for current feature and release information, then compile a summary."
Action: web_search ["GPT-5.1", "GPT-5.1 features", "GPT-5.1 release"] then reasoning then done.

## Example 3: After Tool Calls Return Results
User: "What are the features of GPT-5.1?"
[Previous tool calls returned the needed info]
Reason: "I have gathered enough information about GPT-5.1 features; I will now wrap up."
Action: done.

</examples>

<available_tools>
YOU MUST CALL __reasoning_preamble BEFORE EVERY TOOL CALL IN THIS ASSISTANT TURN. IF YOU DO NOT CALL IT, THE TOOL CALL WILL BE IGNORED.
${actionDesc}
</available_tools>

<mistakes_to_avoid>

1. **Over-assuming**: Don't assume things exist or don't exist - just look them up

2. **Verification obsession**: Don't waste tool calls "verifying existence" - just search for the thing directly

3. **Endless loops**: If 2-3 tool calls don't find something, it probably doesn't exist - report that and move on

4. **Ignoring task context**: If user wants a calendar event, don't just search - create the event

5. **Overthinking**: Keep reasoning simple and tool calls focused

6. **Skipping the reasoning step**: Always call __reasoning_preamble first to outline your approach before other actions

</mistakes_to_avoid>

<response_protocol>
- NEVER output normal text to the user. ONLY call tools.
- Start with __reasoning_preamble and call __reasoning_preamble before every tool call (including done): open with an intent phrase ("Okay, the user wants to...", "Looking into...", etc.) and lay out your reasoning for the next step. No tool names.
- Choose tools based on the action descriptions provided above.
- Default to web_search when information is missing or stale; keep queries targeted (max 3 per call).
- Use at most 6 tool calls total (__reasoning_preamble + 2-3 info calls + __reasoning_preamble + done). If done is called early, stop.
- Do not stop after a single information-gathering call unless the task is trivial or prior results already cover the answer.
- Call done only after you have the needed info or actions completed; do not call it early.
- Do not invent tools. Do not return JSON.
</response_protocol>

${
  fileDesc.length > 0
    ? `<user_uploaded_files>
The user has uploaded the following files which may be relevant to their request:
${fileDesc}
You can use the uploaded files search tool to look for information within these documents if needed.
</user_uploaded_files>`
    : ''
}
`;
};

const getQualityPrompt = (
  actionDesc: string,
  i: number,
  maxIteration: number,
  fileDesc: string,
) => {
  const today = new Date().toLocaleDateString('en-US', {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
  });

  return `
Assistant is a deep-research orchestrator. Your job is to fulfill user requests with the most thorough, comprehensive research possible—no free-form replies.
You will be given the conversation history between the user and an AI, along with the user's latest follow-up question. Based on this, you must use the available tools to fulfill the user's request with depth and rigor.

Today's date: ${today}

You are currently on iteration ${i + 1} of your research process and have ${maxIteration} total iterations. Use every iteration wisely to gather comprehensive information.
When you are finished, you must call the \`done\` tool. Never output text directly.

<goal>
Conduct the deepest, most thorough research possible. Leave no stone unturned.
Follow an iterative reason-act loop: call __reasoning_preamble before every tool call to outline the next step, then call the tool, then __reasoning_preamble again to reflect and decide the next step. Repeat until you have exhaustive coverage.
Open each __reasoning_preamble with a brief intent phrase (e.g., "Okay, the user wants to know about...", "From the results, it looks like...", "Now I need to dig into...") and describe what you'll do next. Keep it natural language, no tool names.
Finish with done only when you have comprehensive, multi-angle information.
</goal>

<core_principle>
Your knowledge is outdated; always use the available tools to ground answers.
This is DEEP RESEARCH mode—be exhaustive. Explore multiple angles: definitions, features, comparisons, recent news, expert opinions, use cases, limitations, and alternatives.
You can call up to 10 tools total per turn. Use an iterative loop: __reasoning_preamble → tool call(s) → __reasoning_preamble → tool call(s) → ... → __reasoning_preamble → done.
Never settle for surface-level answers. If results hint at more depth, reason about your next step and follow up. Cross-reference information from multiple queries.
</core_principle>

<done_usage>
Call done only after you have gathered comprehensive, multi-angle information. Do not call done early—exhaust your research budget first. If you reach the tool cap, call done to conclude.
</done_usage>

<examples>

## Example 1: Unknown Subject - Deep Dive
User: "What is Kimi K2?"
Reason: "Okay, the user wants to know about Kimi K2. I'll start by finding out what it is and its key capabilities."
[calls info-gathering tool]
Reason: "From the results, Kimi K2 is an AI model by Moonshot. Now I need to dig into how it compares to competitors and any recent news."
[calls info-gathering tool]
Reason: "Got comparison info. Let me also check for limitations or critiques to give a balanced view."
[calls info-gathering tool]
Reason: "I now have comprehensive coverage—definition, capabilities, comparisons, and critiques. Wrapping up."
Action: done.

## Example 2: Feature Research - Comprehensive
User: "What are the features of GPT-5.1?"
Reason: "The user wants comprehensive GPT-5.1 feature information. I'll start with core features and specs."
[calls info-gathering tool]
Reason: "Got the basics. Now I should look into how it compares to GPT-4 and benchmark performance."
[calls info-gathering tool]
Reason: "Good comparison data. Let me also gather use cases and expert opinions for depth."
[calls info-gathering tool]
Reason: "I have exhaustive coverage across features, comparisons, benchmarks, and reviews. Done."
Action: done.

## Example 3: Iterative Refinement
User: "Tell me about quantum computing applications in healthcare."
Reason: "Okay, the user wants to know about quantum computing in healthcare. I'll start with an overview of current applications."
[calls info-gathering tool]
Reason: "Results mention drug discovery and diagnostics. Let me dive deeper into drug discovery use cases."
[calls info-gathering tool]
Reason: "Now I'll explore the diagnostics angle and any recent breakthroughs."
[calls info-gathering tool]
Reason: "Comprehensive coverage achieved. Wrapping up."
Action: done.

</examples>

<available_tools>
YOU MUST CALL __reasoning_preamble BEFORE EVERY TOOL CALL IN THIS ASSISTANT TURN. IF YOU DO NOT CALL IT, THE TOOL CALL WILL BE IGNORED.
${actionDesc}
</available_tools>

<research_strategy>
For any topic, consider searching:
1. **Core definition/overview** - What is it?
2. **Features/capabilities** - What can it do?
3. **Comparisons** - How does it compare to alternatives?
4. **Recent news/updates** - What's the latest?
5. **Reviews/opinions** - What do experts say?
6. **Use cases** - How is it being used?
7. **Limitations/critiques** - What are the downsides?
</research_strategy>

<mistakes_to_avoid>

1. **Shallow research**: Don't stop after one or two searches—dig deeper from multiple angles

2. **Over-assuming**: Don't assume things exist or don't exist - just look them up

3. **Missing perspectives**: Search for both positive and critical viewpoints

4. **Ignoring follow-ups**: If results hint at interesting sub-topics, explore them

5. **Premature done**: Don't call done until you've exhausted reasonable research avenues

6. **Skipping the reasoning step**: Always call __reasoning_preamble first to outline your research strategy

</mistakes_to_avoid>

<response_protocol>
- NEVER output normal text to the user. ONLY call tools.
- Follow an iterative loop: __reasoning_preamble → tool call → __reasoning_preamble → tool call → ... → __reasoning_preamble → done.
- Each __reasoning_preamble should reflect on previous results (if any) and state the next research step. No tool names in the reasoning.
- Choose tools based on the action descriptions provided above—use whatever tools are available to accomplish the task.
- Aim for 4-7 information-gathering calls covering different angles; cross-reference and follow up on interesting leads.
- Call done only after comprehensive, multi-angle research is complete.
- Do not invent tools. Do not return JSON.
</response_protocol>

${
  fileDesc.length > 0
    ? `<user_uploaded_files>
The user has uploaded the following files which may be relevant to their request:
${fileDesc}
You can use the uploaded files search tool to look for information within these documents if needed.
</user_uploaded_files>`
    : ''
}
`;
};

export const getResearcherPrompt = (
  actionDesc: string,
  mode: 'speed' | 'balanced' | 'quality',
  i: number,
  maxIteration: number,
  fileIds: string[],
) => {
  let prompt = '';

  const filesData = UploadStore.getFileData(fileIds);

  const fileDesc = filesData
    .map(
      (f) =>
        `<file><name>${f.fileName}</name><initial_content>${f.initialContent}</initial_content></file>`,
    )
    .join('\n');

  switch (mode) {
    case 'speed':
      prompt = getSpeedPrompt(actionDesc, i, maxIteration, fileDesc);
      break;
    case 'balanced':
      prompt = getBalancedPrompt(actionDesc, i, maxIteration, fileDesc);
      break;
    case 'quality':
      prompt = getQualityPrompt(actionDesc, i, maxIteration, fileDesc);
      break;
    default:
      prompt = getSpeedPrompt(actionDesc, i, maxIteration, fileDesc);
      break;
  }

  return prompt;
};

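A minimal sketch of a call site for this dispatcher (the variable values below are illustrative placeholders, not values from this diff):

```ts
// Hypothetical call site, for illustration only.
const systemPrompt = getResearcherPrompt(
  toolDescriptions, // rendered descriptions of the available tools
  'balanced', // 'speed' | 'balanced' | 'quality'
  0, // current iteration, zero-based (the prompt renders i + 1)
  5, // iteration budget for this mode
  uploadedFileIds, // IDs previously returned by the upload endpoint
);
```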

@@ -1,87 +1,54 @@
export const getWriterPrompt = (
  context: string,
  systemInstructions: string,
  mode: 'speed' | 'balanced' | 'quality',
) => {
  return `
You are Perplexica, an AI model skilled in web search and crafting detailed, engaging, and well-structured answers. You excel at summarizing web pages and extracting relevant information to create professional, blog-style responses.

Your task is to provide answers that are:
- **Informative and relevant**: Thoroughly address the user's query using the given context.
- **Well-structured**: Include clear headings and subheadings, and use a professional tone to present information concisely and logically.
- **Engaging and detailed**: Write responses that read like a high-quality blog post, including extra details and relevant insights.
- **Cited and credible**: Use inline citations with [number] notation to refer to the context source(s) for each fact or detail included.
- **Explanatory and comprehensive**: Strive to explain the topic in depth, offering detailed analysis, insights, and clarifications wherever applicable.

### Formatting Instructions
- **Structure**: Use a well-organized format with proper headings (e.g., "## Example heading 1" or "## Example heading 2"). Present information in paragraphs or concise bullet points where appropriate.
- **Tone and Style**: Maintain a neutral, journalistic tone with engaging narrative flow. Write as though you're crafting an in-depth article for a professional audience.
- **Markdown Usage**: Format your response with Markdown for clarity. Use headings, subheadings, bold text, and italicized words as needed to enhance readability.
- **Length and Depth**: Provide comprehensive coverage of the topic. Avoid superficial responses and strive for depth without unnecessary repetition. Expand on technical or complex topics to make them easier to understand for a general audience.
- **No main heading/title**: Start your response directly with the introduction unless asked to provide a specific title.
- **Conclusion or Summary**: Include a concluding paragraph that synthesizes the provided information or suggests potential next steps, where appropriate.

### Citation Requirements
- Cite every single fact, statement, or sentence using [number] notation corresponding to the source from the provided \`context\`.
- Integrate citations naturally at the end of sentences or clauses as appropriate. For example, "The Eiffel Tower is one of the most visited landmarks in the world[1]."
- Ensure that **every sentence in your response includes at least one citation**, even when information is inferred or connected to general knowledge available in the provided context.
- Use multiple sources for a single detail if applicable, such as, "Paris is a cultural hub, attracting millions of visitors annually[1][2]."
- Always prioritize credibility and accuracy by linking all statements back to their respective context sources.
- Avoid citing unsupported assumptions or personal interpretations; if no source supports a statement, clearly indicate the limitation.

### Special Instructions
- If the query involves technical, historical, or complex topics, provide detailed background and explanatory sections to ensure clarity.
- If the user provides vague input or if relevant information is missing, explain what additional details might help refine the search.
- If no relevant information is found, say: "Hmm, sorry I could not find any relevant information on this topic. Would you like me to search again or ask something else?" Be transparent about limitations and suggest alternatives or ways to reframe the query.
${mode === 'quality' ? "- YOU ARE CURRENTLY SET IN QUALITY MODE, GENERATE VERY DEEP, DETAILED AND COMPREHENSIVE RESPONSES USING THE FULL CONTEXT PROVIDED. ASSISTANT'S RESPONSES SHALL NOT BE LESS THAN AT LEAST 2000 WORDS, COVER EVERYTHING AND FRAME IT LIKE A RESEARCH REPORT." : ''}

### User instructions
These instructions are shared with you by the user, not by the system. Follow them, but give them lower priority than the instructions above. If the user has provided specific instructions or preferences, incorporate them into your response while adhering to the overall guidelines.
${systemInstructions}

### Example Output
- Begin with a brief introduction summarizing the event or query topic.
- Follow with detailed sections under clear headings, covering all aspects of the query if possible.
- Provide explanations or historical context as needed to enhance understanding.
- End with a conclusion or overall perspective if relevant.

<context>
${context}
</context>

Current date & time in ISO format (UTC timezone) is: ${new Date().toISOString()}.
`;
};

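A sketch of how this writer prompt might be assembled at a call site (the numbered-source convention is an assumption tied to the [number] citation format; `sources` and `userInstructions` are placeholders):

```ts
// Hypothetical assembly of the writer's context, for illustration only.
const contextBlock = sources
  .map((s, i) => `${i + 1}. ${s.title}\n${s.content}`)
  .join('\n\n'); // numbered entries so the model's [number] citations can resolve

const writerPrompt = getWriterPrompt(contextBlock, userInstructions, 'quality');
```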

@@ -1,4 +1,3 @@
import axios from 'axios';
import { getSearxngURL } from './config/serverRegistry';

interface SearxngSearchOptions {

@@ -44,6 +43,6 @@ export const searchSearxng = async (

  const results: SearxngSearchResult[] = data.results;
  const suggestions: string[] = data.suggestions;

  return { results, suggestions };
};

@@ -2,8 +2,14 @@ import { EventEmitter } from 'stream';
import { applyPatch } from 'rfc6902';
import { Block } from './types';

// Back the session map with a global in development so hot reloads do
// not drop in-flight sessions.
const sessions =
  (global as any)._sessionManagerSessions || new Map<string, SessionManager>();
if (process.env.NODE_ENV !== 'production') {
  (global as any)._sessionManagerSessions = sessions;
}

class SessionManager {
  private static sessions: Map<string, SessionManager> = sessions;
  readonly id: string;
  private blocks = new Map<string, Block>();
  private events: { event: string; data: any }[] = [];

@@ -67,15 +73,32 @@ class SessionManager {
    }
  }

  getAllBlocks() {
    return Array.from(this.blocks.values());
  }

  subscribe(listener: (event: string, data: any) => void): () => void {
    const currentEventsLength = this.events.length;

    const handler = (event: string) => (data: any) => listener(event, data);
    const dataHandler = handler('data');
    const endHandler = handler('end');
    const errorHandler = handler('error');

    this.emitter.on('data', dataHandler);
    this.emitter.on('end', endHandler);
    this.emitter.on('error', errorHandler);

    // Replay events recorded before this subscription so late subscribers
    // receive the full history before live events.
    for (let i = 0; i < currentEventsLength; i++) {
      const { event, data } = this.events[i];
      listener(event, data);
    }

    return () => {
      this.emitter.off('data', dataHandler);
      this.emitter.off('end', endHandler);
      this.emitter.off('error', errorHandler);
    };
  }
}

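A small sketch of how a server-sent-events route might consume this subscription model (the `getSession` lookup and `send` writer are stand-ins, not APIs from this diff):

```ts
// Illustrative only: streaming a session's events to a client.
const session = getSession(sessionId); // stand-in for the app's session lookup
const unsubscribe = session.subscribe((event, data) => {
  // Buffered events are replayed first, then live ones arrive.
  send(`event: ${event}\ndata: ${JSON.stringify(data)}\n\n`);
});

// When the client disconnects:
unsubscribe();
```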
@@ -87,11 +87,25 @@ export type ReadingResearchBlock = {
  reading: Chunk[];
};

export type UploadSearchingResearchBlock = {
  id: string;
  type: 'upload_searching';
  queries: string[];
};

export type UploadSearchResultsResearchBlock = {
  id: string;
  type: 'upload_search_results';
  results: Chunk[];
};

export type ResearchBlockSubStep =
  | ReasoningResearchBlock
  | SearchingResearchBlock
  | SearchResultsResearchBlock
  | ReadingResearchBlock
  | UploadSearchingResearchBlock
  | UploadSearchResultsResearchBlock;

export type ResearchBlock = {
  id: string;

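For illustration, the two new sub-step variants might be emitted around an upload search like this (a sketch inside an async research loop; the `store` instance and the id source are assumptions, not part of this diff):

```ts
// Hypothetical emission of the new sub-steps, for illustration only.
// `store` is assumed to be an UploadStore built for the chat's uploads.
const queries = ['refund policy', 'cancellation terms'];

const searching: UploadSearchingResearchBlock = {
  id: crypto.randomUUID(),
  type: 'upload_searching',
  queries,
};

const found: UploadSearchResultsResearchBlock = {
  id: crypto.randomUUID(),
  type: 'upload_search_results',
  results: await store.query(queries, 8), // Chunk[] from UploadStore.query
};
```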
src/lib/uploads/manager.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
import path from "path";
|
||||
import BaseEmbedding from "../models/base/embedding"
|
||||
import crypto from "crypto"
|
||||
import fs from 'fs';
|
||||
import { splitText } from "../utils/splitText";
|
||||
import { PDFParse } from 'pdf-parse';
|
||||
import { CanvasFactory } from 'pdf-parse/worker';
|
||||
import officeParser from 'officeparser'
|
||||
|
||||
const supportedMimeTypes = ['application/pdf', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', 'text/plain'] as const
|
||||
|
||||
type SupportedMimeType = typeof supportedMimeTypes[number];
|
||||
|
||||
type UploadManagerParams = {
|
||||
embeddingModel: BaseEmbedding<any>;
|
||||
}
|
||||
|
||||
type RecordedFile = {
|
||||
id: string;
|
||||
name: string;
|
||||
filePath: string;
|
||||
contentPath: string;
|
||||
uploadedAt: string;
|
||||
}
|
||||
|
||||
type FileRes = {
|
||||
fileName: string;
|
||||
fileExtension: string;
|
||||
fileId: string;
|
||||
}
|
||||
|
||||
class UploadManager {
|
||||
private embeddingModel: BaseEmbedding<any>;
|
||||
static uploadsDir = path.join(process.cwd(), 'data', 'uploads');
|
||||
static uploadedFilesRecordPath = path.join(this.uploadsDir, 'uploaded_files.json');
|
||||
|
||||
constructor(private params: UploadManagerParams) {
|
||||
this.embeddingModel = params.embeddingModel;
|
||||
|
||||
if (!fs.existsSync(UploadManager.uploadsDir)) {
|
||||
fs.mkdirSync(UploadManager.uploadsDir, { recursive: true });
|
||||
}
|
||||
|
||||
if (!fs.existsSync(UploadManager.uploadedFilesRecordPath)) {
|
||||
const data = {
|
||||
files: []
|
||||
}
|
||||
|
||||
fs.writeFileSync(UploadManager.uploadedFilesRecordPath, JSON.stringify(data, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
private static getRecordedFiles(): RecordedFile[] {
|
||||
const data = fs.readFileSync(UploadManager.uploadedFilesRecordPath, 'utf-8');
|
||||
return JSON.parse(data).files;
|
||||
}
|
||||
|
||||
private static addNewRecordedFile(fileRecord: RecordedFile) {
|
||||
const currentData = this.getRecordedFiles()
|
||||
|
||||
currentData.push(fileRecord);
|
||||
|
||||
fs.writeFileSync(UploadManager.uploadedFilesRecordPath, JSON.stringify({ files: currentData }, null, 2));
|
||||
}
|
||||
|
||||
static getFile(fileId: string): RecordedFile | null {
|
||||
const recordedFiles = this.getRecordedFiles();
|
||||
|
||||
return recordedFiles.find(f => f.id === fileId) || null;
|
||||
}
|
||||
|
||||
static getFileChunks(fileId: string): { content: string; embedding: number[] }[] {
|
||||
try {
|
||||
const recordedFile = this.getFile(fileId);
|
||||
|
||||
if (!recordedFile) {
|
||||
throw new Error(`File with ID ${fileId} not found`);
|
||||
}
|
||||
|
||||
const contentData = JSON.parse(fs.readFileSync(recordedFile.contentPath, 'utf-8'))
|
||||
|
||||
return contentData.chunks;
|
||||
} catch (err) {
|
||||
console.log('Error getting file chunks:', err);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
private async extractContentAndEmbed(filePath: string, fileType: SupportedMimeType): Promise<string> {
|
||||
switch (fileType) {
|
||||
case 'text/plain':
|
||||
const content = fs.readFileSync(filePath, 'utf-8');
|
||||
|
||||
const splittedText = splitText(content, 512, 128)
|
||||
const embeddings = await this.embeddingModel.embedText(splittedText)
|
||||
|
||||
if (embeddings.length !== splittedText.length) {
|
||||
throw new Error('Embeddings and text chunks length mismatch');
|
||||
}
|
||||
|
||||
const contentPath = filePath.split('.').slice(0, -1).join('.') + '.content.json';
|
||||
|
||||
const data = {
|
||||
chunks: splittedText.map((text, i) => {
|
||||
return {
|
||||
content: text,
|
||||
embedding: embeddings[i],
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fs.writeFileSync(contentPath, JSON.stringify(data, null, 2));
|
||||
|
||||
return contentPath;
|
||||
case 'application/pdf':
|
||||
const pdfBuffer = fs.readFileSync(filePath);
|
||||
|
||||
const parser = new PDFParse({
|
||||
data: pdfBuffer,
|
||||
CanvasFactory
|
||||
})
|
||||
|
||||
const pdfText = await parser.getText().then(res => res.text)
|
||||
|
||||
const pdfSplittedText = splitText(pdfText, 512, 128)
|
||||
const pdfEmbeddings = await this.embeddingModel.embedText(pdfSplittedText)
|
||||
|
||||
if (pdfEmbeddings.length !== pdfSplittedText.length) {
|
||||
throw new Error('Embeddings and text chunks length mismatch');
|
||||
}
|
||||
|
||||
const pdfContentPath = filePath.split('.').slice(0, -1).join('.') + '.content.json';
|
||||
|
||||
const pdfData = {
|
||||
chunks: pdfSplittedText.map((text, i) => {
|
||||
return {
|
||||
content: text,
|
||||
embedding: pdfEmbeddings[i],
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fs.writeFileSync(pdfContentPath, JSON.stringify(pdfData, null, 2));
|
||||
|
||||
return pdfContentPath;
|
||||
case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
|
||||
const docBuffer = fs.readFileSync(filePath);
|
||||
|
||||
const docText = await officeParser.parseOfficeAsync(docBuffer)
|
||||
|
||||
const docSplittedText = splitText(docText, 512, 128)
|
||||
const docEmbeddings = await this.embeddingModel.embedText(docSplittedText)
|
||||
|
||||
if (docEmbeddings.length !== docSplittedText.length) {
|
||||
throw new Error('Embeddings and text chunks length mismatch');
|
||||
}
|
||||
|
||||
const docContentPath = filePath.split('.').slice(0, -1).join('.') + '.content.json';
|
||||
|
||||
const docData = {
|
||||
chunks: docSplittedText.map((text, i) => {
|
||||
return {
|
||||
content: text,
|
||||
embedding: docEmbeddings[i],
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fs.writeFileSync(docContentPath, JSON.stringify(docData, null, 2));
|
||||
|
||||
return docContentPath;
|
||||
default:
|
||||
throw new Error(`Unsupported file type: ${fileType}`);
|
||||
}
|
||||
}
|
||||
|
||||
async processFiles(files: File[]): Promise<FileRes[]> {
|
||||
const processedFiles: FileRes[] = [];
|
||||
|
||||
await Promise.all(files.map(async (file) => {
|
||||
if (!(supportedMimeTypes as unknown as string[]).includes(file.type)) {
|
||||
throw new Error(`File type ${file.type} not supported`);
|
||||
}
|
||||
|
||||
const fileId = crypto.randomBytes(16).toString('hex');
|
||||
|
||||
const fileExtension = file.name.split('.').pop();
|
||||
const fileName = `${crypto.randomBytes(16).toString('hex')}.${fileExtension}`;
|
||||
const filePath = path.join(UploadManager.uploadsDir, fileName);
|
||||
|
||||
const buffer = Buffer.from(await file.arrayBuffer())
|
||||
|
||||
fs.writeFileSync(filePath, buffer);
|
||||
|
||||
const contentFilePath = await this.extractContentAndEmbed(filePath, file.type as SupportedMimeType);
|
||||
|
||||
const fileRecord: RecordedFile = {
|
||||
id: fileId,
|
||||
name: file.name,
|
||||
filePath: filePath,
|
||||
contentPath: contentFilePath,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}
|
||||
|
||||
UploadManager.addNewRecordedFile(fileRecord);
|
||||
|
||||
processedFiles.push({
|
||||
fileExtension: fileExtension || '',
|
||||
fileId,
|
||||
fileName: file.name
|
||||
});
|
||||
}))
|
||||
|
||||
return processedFiles;
|
||||
}
|
||||
}
|
||||
|
||||
export default UploadManager;
|
||||
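A short sketch of an upload route built on this manager (the route shape and `embeddingModel` provisioning are assumptions; only `processFiles` comes from this diff):

```ts
// Hypothetical Next.js route handler, for illustration only.
export async function POST(req: Request) {
  const formData = await req.formData();
  const files = formData.getAll('files') as File[];

  const manager = new UploadManager({ embeddingModel }); // assumed configured elsewhere
  const processed = await manager.processFiles(files);

  // -> [{ fileId, fileName, fileExtension }] for the client to reference later
  return Response.json({ files: processed });
}
```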
src/lib/uploads/store.ts (new file, 122 lines)
@@ -0,0 +1,122 @@
import BaseEmbedding from '../models/base/embedding';
import UploadManager from './manager';
import computeSimilarity from '../utils/computeSimilarity';
import { Chunk } from '../types';
import { hashObj } from '../serverUtils';
import fs from 'fs';

type UploadStoreParams = {
  embeddingModel: BaseEmbedding<any>;
  fileIds: string[];
};

type StoreRecord = {
  embedding: number[];
  content: string;
  fileId: string;
  metadata: Record<string, any>;
};

class UploadStore {
  embeddingModel: BaseEmbedding<any>;
  fileIds: string[];
  records: StoreRecord[] = [];

  constructor(private params: UploadStoreParams) {
    this.embeddingModel = params.embeddingModel;
    this.fileIds = params.fileIds;
    this.initializeStore();
  }

  initializeStore() {
    this.fileIds.forEach((fileId) => {
      const file = UploadManager.getFile(fileId);

      if (!file) {
        throw new Error(`File with ID ${fileId} not found`);
      }

      const chunks = UploadManager.getFileChunks(fileId);

      this.records.push(
        ...chunks.map((chunk) => ({
          embedding: chunk.embedding,
          content: chunk.content,
          fileId: fileId,
          metadata: {
            fileName: file.name,
            title: file.name,
            url: `file_id://${file.id}`,
          },
        })),
      );
    });
  }

  async query(queries: string[], topK: number): Promise<Chunk[]> {
    const queryEmbeddings = await this.embeddingModel.embedText(queries);

    const results: { chunk: Chunk; score: number }[][] = [];
    const hashResults: string[][] = [];

    await Promise.all(
      queryEmbeddings.map(async (query) => {
        const similarities = this.records
          .map((record) => {
            return {
              chunk: {
                content: record.content,
                metadata: {
                  ...record.metadata,
                  fileId: record.fileId,
                },
              },
              score: computeSimilarity(query, record.embedding),
            } as { chunk: Chunk; score: number };
          })
          .sort((a, b) => b.score - a.score);

        results.push(similarities);
        // Hash only the chunk (not the per-query score) so the same chunk
        // ranked under several queries accumulates into one fused entry.
        hashResults.push(similarities.map((s) => hashObj(s.chunk)));
      }),
    );

    // Reciprocal-rank-style fusion across the per-query rankings: each
    // occurrence contributes score / (rank + 1 + k), where k = 60 damps
    // the influence of rank differences, as in standard RRF.
    const chunkMap: Map<string, Chunk> = new Map();
    const scoreMap: Map<string, number> = new Map();
    const k = 60;

    for (let i = 0; i < results.length; i++) {
      for (let j = 0; j < results[i].length; j++) {
        const chunkHash = hashResults[i][j];

        chunkMap.set(chunkHash, results[i][j].chunk);
        scoreMap.set(
          chunkHash,
          (scoreMap.get(chunkHash) || 0) + results[i][j].score / (j + 1 + k),
        );
      }
    }

    const finalResults = Array.from(scoreMap.entries())
      .sort((a, b) => b[1] - a[1])
      .map(([chunkHash, _score]) => {
        return chunkMap.get(chunkHash)!;
      });

    return finalResults.slice(0, topK);
  }

  static getFileData(fileIds: string[]): { fileName: string; initialContent: string }[] {
    const filesData: { fileName: string; initialContent: string }[] = [];

    fileIds.forEach((fileId) => {
      const file = UploadManager.getFile(fileId);

      if (!file) {
        throw new Error(`File with ID ${fileId} not found`);
      }

      const chunks = UploadManager.getFileChunks(fileId);

      filesData.push({
        fileName: file.name,
        initialContent: chunks.slice(0, 3).map((c) => c.content).join('\n---\n'),
      });
    });

    return filesData;
  }
}

export default UploadStore;

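Usage is straightforward; a sketch (with `embeddingModel` and `fileIds` as configured by the caller):

```ts
// Illustrative only: pulling upload context into a research step.
const store = new UploadStore({ embeddingModel, fileIds });

const chunks = await store.query(
  ['refund policy', 'cancellation terms'], // one ranking per query, then fused
  8, // topK after rank fusion
);
// Each chunk's metadata.url is `file_id://<id>`, so answers can cite back
// to the uploaded document.
```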
src/lib/utils/splitText.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
import { getEncoding } from 'js-tiktoken';

// Split on sentence-ish boundaries ("Foo. ", newlines, "! ", "? ", "; ",
// ": ", numbered/bulleted list markers) while keeping the delimiters.
const splitRegex = /(?<=\. |\n|! |\? |; |:\s|\d+\.\s|- |\* )/g;

const enc = getEncoding('cl100k_base');

const getTokenCount = (text: string): number => {
  try {
    return enc.encode(text).length;
  } catch {
    // Rough fallback: ~4 characters per token.
    return Math.ceil(text.length / 4);
  }
};

export const splitText = (
  text: string,
  maxTokens = 512,
  overlapTokens = 64,
): string[] => {
  const segments = text.split(splitRegex).filter(Boolean);

  if (segments.length === 0) {
    return [];
  }

  const segmentTokenCounts = segments.map(getTokenCount);

  const result: string[] = [];

  let chunkStart = 0;

  while (chunkStart < segments.length) {
    let chunkEnd = chunkStart;
    let currentTokenCount = 0;

    while (chunkEnd < segments.length && currentTokenCount < maxTokens) {
      if (currentTokenCount + segmentTokenCounts[chunkEnd] > maxTokens) {
        break;
      }

      currentTokenCount += segmentTokenCounts[chunkEnd];
      chunkEnd++;
    }

    // Guard: a single segment longer than maxTokens must still advance the
    // cursor, otherwise the outer loop would never terminate.
    if (chunkEnd === chunkStart) {
      chunkEnd++;
    }

    // Walk backwards from the chunk start to collect up to overlapTokens
    // worth of trailing segments from the previous chunk.
    let overlapBeforeStart = Math.max(0, chunkStart - 1);
    let overlapBeforeTokenCount = 0;

    while (overlapBeforeStart >= 0 && overlapBeforeTokenCount < overlapTokens) {
      if (
        overlapBeforeTokenCount + segmentTokenCounts[overlapBeforeStart] >
        overlapTokens
      ) {
        break;
      }

      overlapBeforeTokenCount += segmentTokenCounts[overlapBeforeStart];
      overlapBeforeStart--;
    }

    const overlapStartIndex = Math.max(0, overlapBeforeStart + 1);

    const overlapBeforeContent = segments
      .slice(overlapStartIndex, chunkStart)
      .join('');

    const chunkContent = segments.slice(chunkStart, chunkEnd).join('');

    result.push(overlapBeforeContent + chunkContent);

    chunkStart = chunkEnd;
  }

  return result;
};

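For example, chunking a long document with the defaults (`longDocumentText` is a placeholder):

```ts
// ~512-token chunks; each chunk after the first is prefixed with up to
// 64 tokens of trailing segments from the previous chunk, so context at
// chunk boundaries is not lost.
const chunks = splitText(longDocumentText, 512, 64);

chunks.forEach((chunk, i) => console.log(i, chunk.slice(0, 80)));
```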
uploads/.gitignore (vendored, deleted)
@@ -1,2 +0,0 @@
*
!.gitignore