Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-19 16:28:42 +00:00)

Compare commits (15 commits)
docker-tes ... b3aafba30c
Commits (SHA1):
b3aafba30c
9f7fd178e0
59a10d7d00
67ee9eff53
0bb860b154
c0705d1d9e
73b5e8832e
b2da9faeed
1a2ad8a59d
e0817d1008
690ef42861
b84e4e4ce6
467905d9f2
18b6f5b674
2bdcbf20fb
@@ -12,9 +12,6 @@ COPY public ./public
 RUN mkdir -p /home/perplexica/data
 RUN yarn build
 
-RUN yarn add --dev @vercel/ncc
-RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator
-
 FROM node:20.18.0-slim
 
 WORKDIR /home/perplexica
@@ -24,12 +21,7 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
 
 COPY --from=builder /home/perplexica/.next/standalone ./
 COPY --from=builder /home/perplexica/data ./data
-COPY drizzle ./drizzle
-COPY --from=builder /home/perplexica/migrator/build ./build
-COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
 
 RUN mkdir /home/perplexica/uploads
 
-COPY entrypoint.sh ./entrypoint.sh
-RUN chmod +x ./entrypoint.sh
-CMD ["./entrypoint.sh"]
+CMD ["node", "server.js"]
@@ -16,7 +16,6 @@ services:
       dockerfile: app.dockerfile
     environment:
       - SEARXNG_API_URL=http://searxng:8080
-      - DATA_DIR=/home/perplexica
     ports:
       - 3000:3000
    networks:
@@ -1,11 +1,10 @@
 import { defineConfig } from 'drizzle-kit';
-import path from 'path';
 
 export default defineConfig({
   dialect: 'sqlite',
   schema: './src/lib/db/schema.ts',
   out: './drizzle',
   dbCredentials: {
-    url: path.join(process.cwd(), 'data', 'db.sqlite'),
+    url: './data/db.sqlite',
   },
 });
@@ -1,16 +0,0 @@
-CREATE TABLE IF NOT EXISTS `chats` (
-    `id` text PRIMARY KEY NOT NULL,
-    `title` text NOT NULL,
-    `createdAt` text NOT NULL,
-    `focusMode` text NOT NULL,
-    `files` text DEFAULT '[]'
-);
---> statement-breakpoint
-CREATE TABLE IF NOT EXISTS `messages` (
-    `id` integer PRIMARY KEY NOT NULL,
-    `content` text NOT NULL,
-    `chatId` text NOT NULL,
-    `messageId` text NOT NULL,
-    `type` text,
-    `metadata` text
-);
@@ -1,116 +0,0 @@
-{
-  "version": "6",
-  "dialect": "sqlite",
-  "id": "ef3a044b-0f34-40b5-babb-2bb3a909ba27",
-  "prevId": "00000000-0000-0000-0000-000000000000",
-  "tables": {
-    "chats": {
-      "name": "chats",
-      "columns": {
-        "id": {
-          "name": "id",
-          "type": "text",
-          "primaryKey": true,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "title": {
-          "name": "title",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "createdAt": {
-          "name": "createdAt",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "focusMode": {
-          "name": "focusMode",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "files": {
-          "name": "files",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": false,
-          "autoincrement": false,
-          "default": "'[]'"
-        }
-      },
-      "indexes": {},
-      "foreignKeys": {},
-      "compositePrimaryKeys": {},
-      "uniqueConstraints": {},
-      "checkConstraints": {}
-    },
-    "messages": {
-      "name": "messages",
-      "columns": {
-        "id": {
-          "name": "id",
-          "type": "integer",
-          "primaryKey": true,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "content": {
-          "name": "content",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "chatId": {
-          "name": "chatId",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "messageId": {
-          "name": "messageId",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": true,
-          "autoincrement": false
-        },
-        "type": {
-          "name": "type",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": false,
-          "autoincrement": false
-        },
-        "metadata": {
-          "name": "metadata",
-          "type": "text",
-          "primaryKey": false,
-          "notNull": false,
-          "autoincrement": false
-        }
-      },
-      "indexes": {},
-      "foreignKeys": {},
-      "compositePrimaryKeys": {},
-      "uniqueConstraints": {},
-      "checkConstraints": {}
-    }
-  },
-  "views": {},
-  "enums": {},
-  "_meta": {
-    "schemas": {},
-    "tables": {},
-    "columns": {}
-  },
-  "internal": {
-    "indexes": {}
-  }
-}
@@ -1,13 +0,0 @@
-{
-  "version": "7",
-  "dialect": "sqlite",
-  "entries": [
-    {
-      "idx": 0,
-      "version": "6",
-      "when": 1748405503809,
-      "tag": "0000_fuzzy_randall",
-      "breakpoints": true
-    }
-  ]
-}
@@ -1,6 +0,0 @@
-#!/bin/sh
-set -e
-
-node migrate.js
-
-exec node server.js
package-lock.json (generated new file, 11024 lines): file diff suppressed because it is too large.
@@ -20,6 +20,7 @@
     "@langchain/core": "^0.3.42",
     "@langchain/google-genai": "^0.1.12",
     "@langchain/openai": "^0.0.25",
+    "@langchain/ollama": "^0.2.0",
     "@langchain/textsplitters": "^0.1.0",
     "@tailwindcss/typography": "^0.5.12",
     "@xenova/transformers": "^2.17.2",
@@ -30,10 +31,8 @@
     "compute-dot": "^1.1.0",
     "drizzle-orm": "^0.40.1",
     "html-to-text": "^9.0.5",
-    "jspdf": "^3.0.1",
     "langchain": "^0.1.30",
     "lucide-react": "^0.363.0",
-    "mammoth": "^1.9.1",
     "markdown-to-jsx": "^7.7.2",
     "next": "^15.2.2",
     "next-themes": "^0.3.0",
@@ -51,7 +50,6 @@
   "devDependencies": {
     "@types/better-sqlite3": "^7.6.12",
     "@types/html-to-text": "^9.0.4",
-    "@types/jspdf": "^2.0.0",
     "@types/node": "^20",
     "@types/pdf-parse": "^1.1.4",
     "@types/react": "^18",
@@ -20,6 +20,7 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import { ChatOllama } from '@langchain/ollama';
 import { searchHandlers } from '@/lib/search';
 
 export const runtime = 'nodejs';
@@ -34,6 +35,7 @@ type Message = {
 type ChatModel = {
   provider: string;
   name: string;
+  ollamaContextWindow?: number;
 };
 
 type EmbeddingModel = {
@@ -232,6 +234,11 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
      llm = chatModel.model;
+
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
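The hunks above, and the matching ones in the image, video, suggestion, and search routes that follow, wire the new option the same way on the server: when the incoming request selected the Ollama provider, the route copies ollamaContextWindow from the request body onto the model's numCtx before invoking it. A minimal sketch of that shared pattern, using only names that appear in the diff (ChatOllama from @langchain/ollama, body.chatModel.ollamaContextWindow, and the 2048 default); the helper name applyOllamaContextWindow is made up for illustration and does not exist in the branch:

import { ChatOllama } from '@langchain/ollama';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Hypothetical helper that captures the per-route wiring shown above;
// `llm` and `body` come from the surrounding route handler.
const applyOllamaContextWindow = (
  llm: BaseChatModel,
  body: { chatModel?: { provider: string; ollamaContextWindow?: number } },
) => {
  // Set context window size for Ollama models
  if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
    llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
  }
};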
@@ -7,11 +7,13 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
+import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }
 
 interface ImageSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
@@ -13,12 +13,14 @@ import {
   getCustomOpenaiModelName,
 } from '@/lib/config';
 import { searchHandlers } from '@/lib/search';
+import { ChatOllama } from '@langchain/ollama';
 
 interface chatModel {
   provider: string;
   name: string;
   customOpenAIKey?: string;
   customOpenAIBaseURL?: string;
+  ollamaContextWindow?: number;
 }
 
 interface embeddingModel {
@@ -97,6 +99,10 @@ export const POST = async (req: Request) => {
         .model as unknown as BaseChatModel | undefined;
     }
 
+    if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+      llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+    }
+
     if (
       embeddingModelProviders[embeddingModelProvider] &&
       embeddingModelProviders[embeddingModelProvider][embeddingModel]
@@ -8,10 +8,12 @@ import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { ChatOpenAI } from '@langchain/openai';
+import { ChatOllama } from '@langchain/ollama';
 
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }
 
 interface SuggestionsGenerationBody {
@@ -57,6 +59,10 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
@@ -7,11 +7,13 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
+import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }
 
 interface VideoSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
@@ -26,6 +26,7 @@ interface SettingsType {
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
+  ollamaContextWindow: number;
 }
 
 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -143,14 +144,20 @@ const Page = () => {
   const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
     string | null
   >(null);
-  const [isLoading, setIsLoading] = useState(true);
+  const [isLoading, setIsLoading] = useState(false);
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
+  const [contextWindowSize, setContextWindowSize] = useState(2048);
+  const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
+  const predefinedContextSizes = [
+    1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
+  ];
 
   useEffect(() => {
     const fetchConfig = async () => {
+      setIsLoading(true);
       const res = await fetch(`/api/config`, {
         headers: {
           'Content-Type': 'application/json',
@@ -158,6 +165,7 @@ const Page = () => {
       });
 
       const data = (await res.json()) as SettingsType;
+
       setConfig(data);
 
       const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
@@ -206,6 +214,13 @@ const Page = () => {
       setAutomaticVideoSearch(
         localStorage.getItem('autoVideoSearch') === 'true',
       );
+      const storedContextWindow = parseInt(
+        localStorage.getItem('ollamaContextWindow') ?? '2048',
+      );
+      setContextWindowSize(storedContextWindow);
+      setIsCustomContextWindow(
+        !predefinedContextSizes.includes(storedContextWindow),
+      );
 
       setSystemInstructions(localStorage.getItem('systemInstructions')!);
 
@@ -365,6 +380,8 @@ const Page = () => {
       localStorage.setItem('embeddingModelProvider', value);
     } else if (key === 'embeddingModel') {
       localStorage.setItem('embeddingModel', value);
+    } else if (key === 'ollamaContextWindow') {
+      localStorage.setItem('ollamaContextWindow', value.toString());
     } else if (key === 'systemInstructions') {
       localStorage.setItem('systemInstructions', value);
     }
@@ -598,6 +615,78 @@ const Page = () => {
                         ];
                       })()}
                     />
+                    {selectedChatModelProvider === 'ollama' && (
+                      <div className="flex flex-col space-y-1">
+                        <p className="text-black/70 dark:text-white/70 text-sm">
+                          Chat Context Window Size
+                        </p>
+                        <Select
+                          value={
+                            isCustomContextWindow
+                              ? 'custom'
+                              : contextWindowSize.toString()
+                          }
+                          onChange={(e) => {
+                            const value = e.target.value;
+                            if (value === 'custom') {
+                              setIsCustomContextWindow(true);
+                            } else {
+                              setIsCustomContextWindow(false);
+                              const numValue = parseInt(value);
+                              setContextWindowSize(numValue);
+                              setConfig((prev) => ({
+                                ...prev!,
+                                ollamaContextWindow: numValue,
+                              }));
+                              saveConfig('ollamaContextWindow', numValue);
+                            }
+                          }}
+                          options={[
+                            ...predefinedContextSizes.map((size) => ({
+                              value: size.toString(),
+                              label: `${size.toLocaleString()} tokens`,
+                            })),
+                            { value: 'custom', label: 'Custom...' },
+                          ]}
+                        />
+                        {isCustomContextWindow && (
+                          <div className="mt-2">
+                            <Input
+                              type="number"
+                              min={512}
+                              value={contextWindowSize}
+                              placeholder="Custom context window size (minimum 512)"
+                              isSaving={savingStates['ollamaContextWindow']}
+                              onChange={(e) => {
+                                // Allow any value to be typed
+                                const value =
+                                  parseInt(e.target.value) ||
+                                  contextWindowSize;
+                                setContextWindowSize(value);
+                              }}
+                              onSave={(value) => {
+                                // Validate only when saving
+                                const numValue = Math.max(
+                                  512,
+                                  parseInt(value) || 2048,
+                                );
+                                setContextWindowSize(numValue);
+                                setConfig((prev) => ({
+                                  ...prev!,
+                                  ollamaContextWindow: numValue,
+                                }));
+                                saveConfig('ollamaContextWindow', numValue);
+                              }}
+                            />
+                          </div>
+                        )}
+                        <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
+                          {isCustomContextWindow
+                            ? 'Adjust the context window size for Ollama models (minimum 512 tokens)'
+                            : 'Adjust the context window size for Ollama models'}
+                        </p>
+                      </div>
+                    )}
                   </div>
                 )}
               </div>
@@ -16,6 +16,8 @@ const Chat = ({
   setFileIds,
   files,
   setFiles,
+  optimizationMode,
+  setOptimizationMode,
 }: {
   messages: Message[];
   sendMessage: (message: string) => void;
@@ -26,6 +28,8 @@ const Chat = ({
   setFileIds: (fileIds: string[]) => void;
   files: File[];
   setFiles: (files: File[]) => void;
+  optimizationMode: string;
+  setOptimizationMode: (mode: string) => void;
 }) => {
   const [dividerWidth, setDividerWidth] = useState(0);
   const dividerRef = useRef<HTMLDivElement | null>(null);
@@ -99,6 +103,8 @@ const Chat = ({
             setFileIds={setFileIds}
             files={files}
             setFiles={setFiles}
+            optimizationMode={optimizationMode}
+            setOptimizationMode={setOptimizationMode}
           />
         </div>
       )}
|
|||||||
|
|
||||||
const [notFound, setNotFound] = useState(false);
|
const [notFound, setNotFound] = useState(false);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const savedOptimizationMode = localStorage.getItem('optimizationMode');
|
||||||
|
|
||||||
|
if (savedOptimizationMode !== null) {
|
||||||
|
setOptimizationMode(savedOptimizationMode);
|
||||||
|
} else {
|
||||||
|
localStorage.setItem('optimizationMode', optimizationMode);
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (
|
if (
|
||||||
chatId &&
|
chatId &&
|
||||||
@ -327,7 +337,11 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
}
|
}
|
||||||
}, [isMessagesLoaded, isConfigReady]);
|
}, [isMessagesLoaded, isConfigReady]);
|
||||||
|
|
||||||
const sendMessage = async (message: string, messageId?: string) => {
|
const sendMessage = async (
|
||||||
|
message: string,
|
||||||
|
messageId?: string,
|
||||||
|
options?: { rewriteIndex?: number },
|
||||||
|
) => {
|
||||||
if (loading) return;
|
if (loading) return;
|
||||||
if (!isConfigReady) {
|
if (!isConfigReady) {
|
||||||
toast.error('Cannot send message before the configuration is ready');
|
toast.error('Cannot send message before the configuration is ready');
|
||||||
@ -340,6 +354,20 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
let sources: Document[] | undefined = undefined;
|
let sources: Document[] | undefined = undefined;
|
||||||
let recievedMessage = '';
|
let recievedMessage = '';
|
||||||
let added = false;
|
let added = false;
|
||||||
|
let messageChatHistory = chatHistory;
|
||||||
|
|
||||||
|
if (options?.rewriteIndex !== undefined) {
|
||||||
|
const rewriteIndex = options.rewriteIndex;
|
||||||
|
setMessages((prev) => {
|
||||||
|
return [...prev.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0)];
|
||||||
|
});
|
||||||
|
|
||||||
|
messageChatHistory = chatHistory.slice(
|
||||||
|
0,
|
||||||
|
messages.length > 2 ? rewriteIndex - 1 : 0,
|
||||||
|
);
|
||||||
|
setChatHistory(messageChatHistory);
|
||||||
|
}
|
||||||
|
|
||||||
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
|
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
|
||||||
|
|
||||||
@ -455,6 +483,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const ollamaContextWindow =
|
||||||
|
localStorage.getItem('ollamaContextWindow') || '2048';
|
||||||
|
|
||||||
const res = await fetch('/api/chat', {
|
const res = await fetch('/api/chat', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@ -471,10 +502,13 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
files: fileIds,
|
files: fileIds,
|
||||||
focusMode: focusMode,
|
focusMode: focusMode,
|
||||||
optimizationMode: optimizationMode,
|
optimizationMode: optimizationMode,
|
||||||
history: chatHistory,
|
history: messageChatHistory,
|
||||||
chatModel: {
|
chatModel: {
|
||||||
name: chatModelProvider.name,
|
name: chatModelProvider.name,
|
||||||
provider: chatModelProvider.provider,
|
provider: chatModelProvider.provider,
|
||||||
|
...(chatModelProvider.provider === 'ollama' && {
|
||||||
|
ollamaContextWindow: parseInt(ollamaContextWindow),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
embeddingModel: {
|
embeddingModel: {
|
||||||
name: embeddingModelProvider.name,
|
name: embeddingModelProvider.name,
|
||||||
@ -512,20 +546,13 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const rewrite = (messageId: string) => {
|
const rewrite = (messageId: string) => {
|
||||||
const index = messages.findIndex((msg) => msg.messageId === messageId);
|
const messageIndex = messages.findIndex(
|
||||||
|
(msg) => msg.messageId === messageId,
|
||||||
if (index === -1) return;
|
);
|
||||||
|
if (messageIndex == -1) return;
|
||||||
const message = messages[index - 1];
|
sendMessage(messages[messageIndex - 1].content, messageId, {
|
||||||
|
rewriteIndex: messageIndex,
|
||||||
setMessages((prev) => {
|
|
||||||
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
|
|
||||||
});
|
});
|
||||||
setChatHistory((prev) => {
|
|
||||||
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
|
|
||||||
});
|
|
||||||
|
|
||||||
sendMessage(message.content, message.messageId);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@ -570,6 +597,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||||||
setFileIds={setFileIds}
|
setFileIds={setFileIds}
|
||||||
files={files}
|
files={files}
|
||||||
setFiles={setFiles}
|
setFiles={setFiles}
|
||||||
|
optimizationMode={optimizationMode}
|
||||||
|
setOptimizationMode={setOptimizationMode}
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
|
@ -4,6 +4,7 @@ import { useEffect, useRef, useState } from 'react';
|
|||||||
import TextareaAutosize from 'react-textarea-autosize';
|
import TextareaAutosize from 'react-textarea-autosize';
|
||||||
import Attach from './MessageInputActions/Attach';
|
import Attach from './MessageInputActions/Attach';
|
||||||
import CopilotToggle from './MessageInputActions/Copilot';
|
import CopilotToggle from './MessageInputActions/Copilot';
|
||||||
|
import Optimization from './MessageInputActions/Optimization';
|
||||||
import { File } from './ChatWindow';
|
import { File } from './ChatWindow';
|
||||||
import AttachSmall from './MessageInputActions/AttachSmall';
|
import AttachSmall from './MessageInputActions/AttachSmall';
|
||||||
|
|
||||||
@ -14,6 +15,8 @@ const MessageInput = ({
|
|||||||
setFileIds,
|
setFileIds,
|
||||||
files,
|
files,
|
||||||
setFiles,
|
setFiles,
|
||||||
|
optimizationMode,
|
||||||
|
setOptimizationMode,
|
||||||
}: {
|
}: {
|
||||||
sendMessage: (message: string) => void;
|
sendMessage: (message: string) => void;
|
||||||
loading: boolean;
|
loading: boolean;
|
||||||
@ -21,6 +24,8 @@ const MessageInput = ({
|
|||||||
setFileIds: (fileIds: string[]) => void;
|
setFileIds: (fileIds: string[]) => void;
|
||||||
files: File[];
|
files: File[];
|
||||||
setFiles: (files: File[]) => void;
|
setFiles: (files: File[]) => void;
|
||||||
|
optimizationMode: string;
|
||||||
|
setOptimizationMode: (mode: string) => void;
|
||||||
}) => {
|
}) => {
|
||||||
const [copilotEnabled, setCopilotEnabled] = useState(false);
|
const [copilotEnabled, setCopilotEnabled] = useState(false);
|
||||||
const [message, setMessage] = useState('');
|
const [message, setMessage] = useState('');
|
||||||
@ -40,20 +45,16 @@ const MessageInput = ({
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleKeyDown = (e: KeyboardEvent) => {
|
const handleKeyDown = (e: KeyboardEvent) => {
|
||||||
const activeElement = document.activeElement;
|
const activeElement = document.activeElement;
|
||||||
|
|
||||||
const isInputFocused =
|
const isInputFocused =
|
||||||
activeElement?.tagName === 'INPUT' ||
|
activeElement?.tagName === 'INPUT' ||
|
||||||
activeElement?.tagName === 'TEXTAREA' ||
|
activeElement?.tagName === 'TEXTAREA' ||
|
||||||
activeElement?.hasAttribute('contenteditable');
|
activeElement?.hasAttribute('contenteditable');
|
||||||
|
|
||||||
if (e.key === '/' && !isInputFocused) {
|
if (e.key === '/' && !isInputFocused) {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
inputRef.current?.focus();
|
inputRef.current?.focus();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
document.addEventListener('keydown', handleKeyDown);
|
document.addEventListener('keydown', handleKeyDown);
|
||||||
|
|
||||||
return () => {
|
return () => {
|
||||||
document.removeEventListener('keydown', handleKeyDown);
|
document.removeEventListener('keydown', handleKeyDown);
|
||||||
};
|
};
|
||||||
@ -75,58 +76,95 @@ const MessageInput = ({
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={cn(
|
className={cn(
|
||||||
'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-hidden border border-light-200 dark:border-dark-200',
|
'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center border border-light-200 dark:border-dark-200',
|
||||||
mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full',
|
mode === 'multi'
|
||||||
|
? 'flex-col rounded-lg'
|
||||||
|
: 'flex-col md:flex-row rounded-lg md:rounded-full',
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{mode === 'single' && (
|
{mode === 'single' && (
|
||||||
<AttachSmall
|
<div className="flex flex-row items-center justify-between w-full mb-2 md:mb-0 md:w-auto">
|
||||||
fileIds={fileIds}
|
<div className="flex flex-row items-center space-x-2">
|
||||||
setFileIds={setFileIds}
|
<AttachSmall
|
||||||
files={files}
|
fileIds={fileIds}
|
||||||
setFiles={setFiles}
|
setFileIds={setFileIds}
|
||||||
/>
|
files={files}
|
||||||
)}
|
setFiles={setFiles}
|
||||||
<TextareaAutosize
|
/>
|
||||||
ref={inputRef}
|
<Optimization
|
||||||
value={message}
|
optimizationMode={optimizationMode}
|
||||||
onChange={(e) => setMessage(e.target.value)}
|
setOptimizationMode={setOptimizationMode}
|
||||||
onHeightChange={(height, props) => {
|
/>
|
||||||
setTextareaRows(Math.ceil(height / props.rowHeight));
|
</div>
|
||||||
}}
|
<div className="md:hidden">
|
||||||
className="transition bg-transparent dark:placeholder:text-white/50 placeholder:text-sm text-sm dark:text-white resize-none focus:outline-none w-full px-2 max-h-24 lg:max-h-36 xl:max-h-48 flex-grow flex-shrink"
|
|
||||||
placeholder="Ask a follow-up"
|
|
||||||
/>
|
|
||||||
{mode === 'single' && (
|
|
||||||
<div className="flex flex-row items-center space-x-4">
|
|
||||||
<CopilotToggle
|
|
||||||
copilotEnabled={copilotEnabled}
|
|
||||||
setCopilotEnabled={setCopilotEnabled}
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
disabled={message.trim().length === 0 || loading}
|
|
||||||
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
|
|
||||||
>
|
|
||||||
<ArrowUp className="bg-background" size={17} />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{mode === 'multi' && (
|
|
||||||
<div className="flex flex-row items-center justify-between w-full pt-2">
|
|
||||||
<AttachSmall
|
|
||||||
fileIds={fileIds}
|
|
||||||
setFileIds={setFileIds}
|
|
||||||
files={files}
|
|
||||||
setFiles={setFiles}
|
|
||||||
/>
|
|
||||||
<div className="flex flex-row items-center space-x-4">
|
|
||||||
<CopilotToggle
|
<CopilotToggle
|
||||||
copilotEnabled={copilotEnabled}
|
copilotEnabled={copilotEnabled}
|
||||||
setCopilotEnabled={setCopilotEnabled}
|
setCopilotEnabled={setCopilotEnabled}
|
||||||
/>
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div className="flex flex-row items-center w-full">
|
||||||
|
<TextareaAutosize
|
||||||
|
ref={inputRef}
|
||||||
|
value={message}
|
||||||
|
onChange={(e) => setMessage(e.target.value)}
|
||||||
|
onHeightChange={(height, props) => {
|
||||||
|
setTextareaRows(Math.ceil(height / props.rowHeight));
|
||||||
|
}}
|
||||||
|
className="transition bg-transparent dark:placeholder:text-white/50 placeholder:text-sm text-sm dark:text-white resize-none focus:outline-none w-full px-2 max-h-24 lg:max-h-36 xl:max-h-48 flex-grow flex-shrink"
|
||||||
|
placeholder="Ask a follow-up"
|
||||||
|
/>
|
||||||
|
{mode === 'single' && (
|
||||||
|
<div className="flex flex-row items-center space-x-4">
|
||||||
|
<div className="hidden md:block">
|
||||||
|
<CopilotToggle
|
||||||
|
copilotEnabled={copilotEnabled}
|
||||||
|
setCopilotEnabled={setCopilotEnabled}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
<button
|
<button
|
||||||
disabled={message.trim().length === 0 || loading}
|
disabled={message.trim().length === 0 || loading}
|
||||||
className="bg-[#24A0ED] text-white text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
|
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
|
||||||
|
>
|
||||||
|
<ArrowUp className="bg-background" size={17} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{mode === 'multi' && (
|
||||||
|
<div className="flex flex-col md:flex-row items-start md:items-center justify-between w-full pt-2">
|
||||||
|
<div className="flex flex-row items-center justify-between w-full md:w-auto mb-2 md:mb-0">
|
||||||
|
<div className="flex flex-row items-center space-x-2">
|
||||||
|
<AttachSmall
|
||||||
|
fileIds={fileIds}
|
||||||
|
setFileIds={setFileIds}
|
||||||
|
files={files}
|
||||||
|
setFiles={setFiles}
|
||||||
|
/>
|
||||||
|
<Optimization
|
||||||
|
optimizationMode={optimizationMode}
|
||||||
|
setOptimizationMode={setOptimizationMode}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="md:hidden">
|
||||||
|
<CopilotToggle
|
||||||
|
copilotEnabled={copilotEnabled}
|
||||||
|
setCopilotEnabled={setCopilotEnabled}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-row items-center space-x-4 self-end">
|
||||||
|
<div className="hidden md:block">
|
||||||
|
<CopilotToggle
|
||||||
|
copilotEnabled={copilotEnabled}
|
||||||
|
setCopilotEnabled={setCopilotEnabled}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
disabled={message.trim().length === 0 || loading}
|
||||||
|
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
|
||||||
>
|
>
|
||||||
<ArrowUp className="bg-background" size={17} />
|
<ArrowUp className="bg-background" size={17} />
|
||||||
</button>
|
</button>
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import { ChevronDown, Sliders, Star, Zap } from 'lucide-react';
|
import { ChevronDown, Minimize2, Sliders, Star, Zap } from 'lucide-react';
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import {
|
import {
|
||||||
Popover,
|
Popover,
|
||||||
@ -7,7 +7,6 @@ import {
|
|||||||
Transition,
|
Transition,
|
||||||
} from '@headlessui/react';
|
} from '@headlessui/react';
|
||||||
import { Fragment } from 'react';
|
import { Fragment } from 'react';
|
||||||
|
|
||||||
const OptimizationModes = [
|
const OptimizationModes = [
|
||||||
{
|
{
|
||||||
key: 'speed',
|
key: 'speed',
|
||||||
@ -41,8 +40,13 @@ const Optimization = ({
|
|||||||
optimizationMode: string;
|
optimizationMode: string;
|
||||||
setOptimizationMode: (mode: string) => void;
|
setOptimizationMode: (mode: string) => void;
|
||||||
}) => {
|
}) => {
|
||||||
|
const handleOptimizationChange = (mode: string) => {
|
||||||
|
setOptimizationMode(mode);
|
||||||
|
localStorage.setItem('optimizationMode', mode);
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
|
<Popover className="relative">
|
||||||
<PopoverButton
|
<PopoverButton
|
||||||
type="button"
|
type="button"
|
||||||
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||||
@ -70,11 +74,11 @@ const Optimization = ({
|
|||||||
leaveFrom="opacity-100 translate-y-0"
|
leaveFrom="opacity-100 translate-y-0"
|
||||||
leaveTo="opacity-0 translate-y-1"
|
leaveTo="opacity-0 translate-y-1"
|
||||||
>
|
>
|
||||||
<PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0">
|
<PopoverPanel className="absolute z-10 bottom-[100%] mb-2 left-1/2 transform -translate-x-1/2">
|
||||||
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
|
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-max max-w-[300px] p-4 max-h-[200px] md:max-h-none overflow-y-auto">
|
||||||
{OptimizationModes.map((mode, i) => (
|
{OptimizationModes.map((mode, i) => (
|
||||||
<PopoverButton
|
<PopoverButton
|
||||||
onClick={() => setOptimizationMode(mode.key)}
|
onClick={() => handleOptimizationChange(mode.key)}
|
||||||
key={i}
|
key={i}
|
||||||
disabled={mode.key === 'quality'}
|
disabled={mode.key === 'quality'}
|
||||||
className={cn(
|
className={cn(
|
||||||
|
@ -1,122 +1,8 @@
|
|||||||
import { Clock, Edit, Share, Trash, FileText, FileDown } from 'lucide-react';
|
import { Clock, Edit, Share, Trash } from 'lucide-react';
|
||||||
import { Message } from './ChatWindow';
|
import { Message } from './ChatWindow';
|
||||||
import { useEffect, useState, Fragment } from 'react';
|
import { useEffect, useState } from 'react';
|
||||||
import { formatTimeDifference } from '@/lib/utils';
|
import { formatTimeDifference } from '@/lib/utils';
|
||||||
import DeleteChat from './DeleteChat';
|
import DeleteChat from './DeleteChat';
|
||||||
import {
|
|
||||||
Popover,
|
|
||||||
PopoverButton,
|
|
||||||
PopoverPanel,
|
|
||||||
Transition,
|
|
||||||
} from '@headlessui/react';
|
|
||||||
import jsPDF from 'jspdf';
|
|
||||||
|
|
||||||
const downloadFile = (filename: string, content: string, type: string) => {
|
|
||||||
const blob = new Blob([content], { type });
|
|
||||||
const url = URL.createObjectURL(blob);
|
|
||||||
const a = document.createElement('a');
|
|
||||||
a.href = url;
|
|
||||||
a.download = filename;
|
|
||||||
document.body.appendChild(a);
|
|
||||||
a.click();
|
|
||||||
setTimeout(() => {
|
|
||||||
document.body.removeChild(a);
|
|
||||||
URL.revokeObjectURL(url);
|
|
||||||
}, 0);
|
|
||||||
};
|
|
||||||
|
|
||||||
const exportAsMarkdown = (messages: Message[], title: string) => {
|
|
||||||
const date = new Date(messages[0]?.createdAt || Date.now()).toLocaleString();
|
|
||||||
let md = `# 💬 Chat Export: ${title}\n\n`;
|
|
||||||
md += `*Exported on: ${date}*\n\n---\n`;
|
|
||||||
messages.forEach((msg, idx) => {
|
|
||||||
md += `\n---\n`;
|
|
||||||
md += `**${msg.role === 'user' ? '🧑 User' : '🤖 Assistant'}**
|
|
||||||
`;
|
|
||||||
md += `*${new Date(msg.createdAt).toLocaleString()}*\n\n`;
|
|
||||||
md += `> ${msg.content.replace(/\n/g, '\n> ')}\n`;
|
|
||||||
if (msg.sources && msg.sources.length > 0) {
|
|
||||||
md += `\n**Citations:**\n`;
|
|
||||||
msg.sources.forEach((src: any, i: number) => {
|
|
||||||
const url = src.metadata?.url || '';
|
|
||||||
md += `- [${i + 1}] [${url}](${url})\n`;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
md += '\n---\n';
|
|
||||||
downloadFile(`${title || 'chat'}.md`, md, 'text/markdown');
|
|
||||||
};
|
|
||||||
|
|
||||||
const exportAsPDF = (messages: Message[], title: string) => {
|
|
||||||
const doc = new jsPDF();
|
|
||||||
const date = new Date(messages[0]?.createdAt || Date.now()).toLocaleString();
|
|
||||||
let y = 15;
|
|
||||||
const pageHeight = doc.internal.pageSize.height;
|
|
||||||
doc.setFontSize(18);
|
|
||||||
doc.text(`Chat Export: ${title}`, 10, y);
|
|
||||||
y += 8;
|
|
||||||
doc.setFontSize(11);
|
|
||||||
doc.setTextColor(100);
|
|
||||||
doc.text(`Exported on: ${date}`, 10, y);
|
|
||||||
y += 8;
|
|
||||||
doc.setDrawColor(200);
|
|
||||||
doc.line(10, y, 200, y);
|
|
||||||
y += 6;
|
|
||||||
doc.setTextColor(30);
|
|
||||||
messages.forEach((msg, idx) => {
|
|
||||||
if (y > pageHeight - 30) {
|
|
||||||
doc.addPage();
|
|
||||||
y = 15;
|
|
||||||
}
|
|
||||||
doc.setFont('helvetica', 'bold');
|
|
||||||
doc.text(`${msg.role === 'user' ? 'User' : 'Assistant'}`, 10, y);
|
|
||||||
doc.setFont('helvetica', 'normal');
|
|
||||||
doc.setFontSize(10);
|
|
||||||
doc.setTextColor(120);
|
|
||||||
doc.text(`${new Date(msg.createdAt).toLocaleString()}`, 40, y);
|
|
||||||
y += 6;
|
|
||||||
doc.setTextColor(30);
|
|
||||||
doc.setFontSize(12);
|
|
||||||
const lines = doc.splitTextToSize(msg.content, 180);
|
|
||||||
for (let i = 0; i < lines.length; i++) {
|
|
||||||
if (y > pageHeight - 20) {
|
|
||||||
doc.addPage();
|
|
||||||
y = 15;
|
|
||||||
}
|
|
||||||
doc.text(lines[i], 12, y);
|
|
||||||
y += 6;
|
|
||||||
}
|
|
||||||
if (msg.sources && msg.sources.length > 0) {
|
|
||||||
doc.setFontSize(11);
|
|
||||||
doc.setTextColor(80);
|
|
||||||
if (y > pageHeight - 20) {
|
|
||||||
doc.addPage();
|
|
||||||
y = 15;
|
|
||||||
}
|
|
||||||
doc.text('Citations:', 12, y);
|
|
||||||
y += 5;
|
|
||||||
msg.sources.forEach((src: any, i: number) => {
|
|
||||||
const url = src.metadata?.url || '';
|
|
||||||
if (y > pageHeight - 15) {
|
|
||||||
doc.addPage();
|
|
||||||
y = 15;
|
|
||||||
}
|
|
||||||
doc.text(`- [${i + 1}] ${url}`, 15, y);
|
|
||||||
y += 5;
|
|
||||||
});
|
|
||||||
doc.setTextColor(30);
|
|
||||||
}
|
|
||||||
y += 6;
|
|
||||||
doc.setDrawColor(230);
|
|
||||||
if (y > pageHeight - 10) {
|
|
||||||
doc.addPage();
|
|
||||||
y = 15;
|
|
||||||
}
|
|
||||||
doc.line(10, y, 200, y);
|
|
||||||
y += 4;
|
|
||||||
});
|
|
||||||
doc.save(`${title || 'chat'}.pdf`);
|
|
||||||
};
|
|
||||||
|
|
||||||
const Navbar = ({
|
const Navbar = ({
|
||||||
chatId,
|
chatId,
|
||||||
@ -173,39 +59,10 @@ const Navbar = ({
|
|||||||
<p className="hidden lg:flex">{title}</p>
|
<p className="hidden lg:flex">{title}</p>
|
||||||
|
|
||||||
<div className="flex flex-row items-center space-x-4">
|
<div className="flex flex-row items-center space-x-4">
|
||||||
<Popover className="relative">
|
<Share
|
||||||
<PopoverButton className="active:scale-95 transition duration-100 cursor-pointer p-2 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary">
|
size={17}
|
||||||
<Share size={17} />
|
className="active:scale-95 transition duration-100 cursor-pointer"
|
||||||
</PopoverButton>
|
/>
|
||||||
<Transition
|
|
||||||
as={Fragment}
|
|
||||||
enter="transition ease-out duration-100"
|
|
||||||
enterFrom="opacity-0 translate-y-1"
|
|
||||||
enterTo="opacity-100 translate-y-0"
|
|
||||||
leave="transition ease-in duration-75"
|
|
||||||
leaveFrom="opacity-100 translate-y-0"
|
|
||||||
leaveTo="opacity-0 translate-y-1"
|
|
||||||
>
|
|
||||||
<PopoverPanel className="absolute right-0 mt-2 w-64 rounded-xl shadow-xl bg-light-primary dark:bg-dark-primary border border-light-200 dark:border-dark-200 z-50">
|
|
||||||
<div className="flex flex-col py-3 px-3 gap-2">
|
|
||||||
<button
|
|
||||||
className="flex items-center gap-2 px-4 py-2 text-left hover:bg-light-secondary dark:hover:bg-dark-secondary transition-colors text-black dark:text-white rounded-lg font-medium"
|
|
||||||
onClick={() => exportAsMarkdown(messages, title || '')}
|
|
||||||
>
|
|
||||||
<FileText size={17} className="text-[#24A0ED]" />
|
|
||||||
Export as Markdown
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
className="flex items-center gap-2 px-4 py-2 text-left hover:bg-light-secondary dark:hover:bg-dark-secondary transition-colors text-black dark:text-white rounded-lg font-medium"
|
|
||||||
onClick={() => exportAsPDF(messages, title || '')}
|
|
||||||
>
|
|
||||||
<FileDown size={17} className="text-[#24A0ED]" />
|
|
||||||
Export as PDF
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</PopoverPanel>
|
|
||||||
</Transition>
|
|
||||||
</Popover>
|
|
||||||
<DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
|
<DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -35,9 +35,10 @@ const SearchImages = ({
|
|||||||
|
|
||||||
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||||
const chatModel = localStorage.getItem('chatModel');
|
const chatModel = localStorage.getItem('chatModel');
|
||||||
|
|
||||||
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
||||||
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
||||||
|
const ollamaContextWindow =
|
||||||
|
localStorage.getItem('ollamaContextWindow') || '2048';
|
||||||
|
|
||||||
const res = await fetch(`/api/images`, {
|
const res = await fetch(`/api/images`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@ -54,6 +55,9 @@ const SearchImages = ({
|
|||||||
customOpenAIBaseURL: customOpenAIBaseURL,
|
customOpenAIBaseURL: customOpenAIBaseURL,
|
||||||
customOpenAIKey: customOpenAIKey,
|
customOpenAIKey: customOpenAIKey,
|
||||||
}),
|
}),
|
||||||
|
...(chatModelProvider === 'ollama' && {
|
||||||
|
ollamaContextWindow: parseInt(ollamaContextWindow),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
@ -50,9 +50,10 @@ const Searchvideos = ({
|
|||||||
|
|
||||||
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||||
const chatModel = localStorage.getItem('chatModel');
|
const chatModel = localStorage.getItem('chatModel');
|
||||||
|
|
||||||
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
||||||
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
||||||
|
const ollamaContextWindow =
|
||||||
|
localStorage.getItem('ollamaContextWindow') || '2048';
|
||||||
|
|
||||||
const res = await fetch(`/api/videos`, {
|
const res = await fetch(`/api/videos`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@ -69,6 +70,9 @@ const Searchvideos = ({
|
|||||||
customOpenAIBaseURL: customOpenAIBaseURL,
|
customOpenAIBaseURL: customOpenAIBaseURL,
|
||||||
customOpenAIKey: customOpenAIKey,
|
customOpenAIKey: customOpenAIKey,
|
||||||
}),
|
}),
|
||||||
|
...(chatModelProvider === 'ollama' && {
|
||||||
|
ollamaContextWindow: parseInt(ollamaContextWindow),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
@ -6,6 +6,8 @@ export const getSuggestions = async (chatHisory: Message[]) => {
|
|||||||
|
|
||||||
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
||||||
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
||||||
|
const ollamaContextWindow =
|
||||||
|
localStorage.getItem('ollamaContextWindow') || '2048';
|
||||||
|
|
||||||
const res = await fetch(`/api/suggestions`, {
|
const res = await fetch(`/api/suggestions`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@ -21,6 +23,9 @@ export const getSuggestions = async (chatHisory: Message[]) => {
|
|||||||
customOpenAIKey,
|
customOpenAIKey,
|
||||||
customOpenAIBaseURL,
|
customOpenAIBaseURL,
|
||||||
}),
|
}),
|
||||||
|
...(chatModelProvider === 'ollama' && {
|
||||||
|
ollamaContextWindow: parseInt(ollamaContextWindow),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
@ -3,8 +3,7 @@ import Database from 'better-sqlite3';
|
|||||||
import * as schema from './schema';
|
import * as schema from './schema';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
const DATA_DIR = process.env.DATA_DIR || process.cwd();
|
const sqlite = new Database(path.join(process.cwd(), 'data/db.sqlite'));
|
||||||
const sqlite = new Database(path.join(DATA_DIR, './data/db.sqlite'));
|
|
||||||
const db = drizzle(sqlite, {
|
const db = drizzle(sqlite, {
|
||||||
schema: schema,
|
schema: schema,
|
||||||
});
|
});
|
||||||
|
@ -1,5 +0,0 @@
|
|||||||
import db from './';
|
|
||||||
import { migrate } from 'drizzle-orm/better-sqlite3/migrator';
|
|
||||||
import path from 'path';
|
|
||||||
|
|
||||||
migrate(db, { migrationsFolder: path.join(process.cwd(), 'drizzle') });
|
|
@ -6,31 +6,101 @@ export const PROVIDER_INFO = {
|
|||||||
key: 'groq',
|
key: 'groq',
|
||||||
displayName: 'Groq',
|
displayName: 'Groq',
|
||||||
};
|
};
|
||||||
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||||
|
|
||||||
|
const groqChatModels: Record<string, string>[] = [
|
||||||
|
{
|
||||||
|
displayName: 'Gemma2 9B IT',
|
||||||
|
key: 'gemma2-9b-it',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.3 70B Versatile',
|
||||||
|
key: 'llama-3.3-70b-versatile',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.1 8B Instant',
|
||||||
|
key: 'llama-3.1-8b-instant',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama3 70B 8192',
|
||||||
|
key: 'llama3-70b-8192',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama3 8B 8192',
|
||||||
|
key: 'llama3-8b-8192',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Mixtral 8x7B 32768',
|
||||||
|
key: 'mixtral-8x7b-32768',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Qwen QWQ 32B (Preview)',
|
||||||
|
key: 'qwen-qwq-32b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Mistral Saba 24B (Preview)',
|
||||||
|
key: 'mistral-saba-24b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Qwen 2.5 Coder 32B (Preview)',
|
||||||
|
key: 'qwen-2.5-coder-32b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Qwen 2.5 32B (Preview)',
|
||||||
|
key: 'qwen-2.5-32b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'DeepSeek R1 Distill Qwen 32B (Preview)',
|
||||||
|
key: 'deepseek-r1-distill-qwen-32b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'DeepSeek R1 Distill Llama 70B (Preview)',
|
||||||
|
key: 'deepseek-r1-distill-llama-70b',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.3 70B SpecDec (Preview)',
|
||||||
|
key: 'llama-3.3-70b-specdec',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.2 1B Preview (Preview)',
|
||||||
|
key: 'llama-3.2-1b-preview',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.2 3B Preview (Preview)',
|
||||||
|
key: 'llama-3.2-3b-preview',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.2 11B Vision Preview (Preview)',
|
||||||
|
key: 'llama-3.2-11b-vision-preview',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
displayName: 'Llama 3.2 90B Vision Preview (Preview)',
|
||||||
|
key: 'llama-3.2-90b-vision-preview',
|
||||||
|
},
|
||||||
|
/* {
|
||||||
|
displayName: 'Llama 4 Maverick 17B 128E Instruct (Preview)',
|
||||||
|
key: 'meta-llama/llama-4-maverick-17b-128e-instruct',
|
||||||
|
}, */
|
||||||
|
{
|
||||||
|
displayName: 'Llama 4 Scout 17B 16E Instruct (Preview)',
|
||||||
|
key: 'meta-llama/llama-4-scout-17b-16e-instruct',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
export const loadGroqChatModels = async () => {
|
export const loadGroqChatModels = async () => {
|
||||||
const groqApiKey = getGroqApiKey();
|
const groqApiKey = getGroqApiKey();
|
||||||
|
|
||||||
if (!groqApiKey) return {};
|
if (!groqApiKey) return {};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const res = await fetch('https://api.groq.com/openai/v1/models', {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Authorization: `bearer ${groqApiKey}`,
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const groqChatModels = (await res.json()).data;
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
const chatModels: Record<string, ChatModel> = {};
|
||||||
|
|
||||||
groqChatModels.forEach((model: any) => {
|
groqChatModels.forEach((model) => {
|
||||||
chatModels[model.id] = {
|
chatModels[model.key] = {
|
||||||
displayName: model.id,
|
displayName: model.displayName,
|
||||||
model: new ChatOpenAI({
|
model: new ChatOpenAI({
|
||||||
openAIApiKey: groqApiKey,
|
openAIApiKey: groqApiKey,
|
||||||
modelName: model.id,
|
modelName: model.key,
|
||||||
temperature: 0.7,
|
temperature: 0.7,
|
||||||
configuration: {
|
configuration: {
|
||||||
baseURL: 'https://api.groq.com/openai/v1',
|
baseURL: 'https://api.groq.com/openai/v1',
|
||||||
|
@ -6,8 +6,8 @@ export const PROVIDER_INFO = {
|
|||||||
key: 'ollama',
|
key: 'ollama',
|
||||||
displayName: 'Ollama',
|
displayName: 'Ollama',
|
||||||
};
|
};
|
||||||
import { ChatOllama } from '@langchain/community/chat_models/ollama';
|
import { ChatOllama } from '@langchain/ollama';
|
||||||
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
|
import { OllamaEmbeddings } from '@langchain/ollama';
|
||||||
|
|
||||||
export const loadOllamaChatModels = async () => {
|
export const loadOllamaChatModels = async () => {
|
||||||
const ollamaApiEndpoint = getOllamaApiEndpoint();
|
const ollamaApiEndpoint = getOllamaApiEndpoint();
|
||||||
|
@ -30,18 +30,6 @@ const openaiChatModels: Record<string, string>[] = [
|
|||||||
displayName: 'GPT-4 omni mini',
|
displayName: 'GPT-4 omni mini',
|
||||||
key: 'gpt-4o-mini',
|
key: 'gpt-4o-mini',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
displayName: 'GPT 4.1 nano',
|
|
||||||
key: 'gpt-4.1-nano',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'GPT 4.1 mini',
|
|
||||||
key: 'gpt-4.1-mini',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'GPT 4.1',
|
|
||||||
key: 'gpt-4.1',
|
|
||||||
},
|
|
||||||
];
|
];
|
||||||
|
|
||||||
const openaiEmbeddingModels: Record<string, string>[] = [
|
const openaiEmbeddingModels: Record<string, string>[] = [
|
||||||
|
@ -64,7 +64,7 @@ export const getDocumentsFromLinks = async ({ links }: { links: string[] }) => {
|
|||||||
const splittedText = await splitter.splitText(parsedText);
|
const splittedText = await splitter.splitText(parsedText);
|
||||||
const title = res.data
|
const title = res.data
|
||||||
.toString('utf8')
|
.toString('utf8')
|
||||||
.match(/<title.*>(.*?)<\/title>/)?.[1];
|
.match(/<title>(.*?)<\/title>/)?.[1];
|
||||||
|
|
||||||
const linkDocs = splittedText.map((text) => {
|
const linkDocs = splittedText.map((text) => {
|
||||||
return new Document({
|
return new Document({
|
||||||
|