mirror of
https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-10-19 05:48:15 +00:00
Compare commits
1 Commits
feat/confi
...
develop/ne
Author | SHA1 | Date | |
---|---|---|---|
|
6f9eb3b1f3 |
Binary file not shown.
Before Width: | Height: | Size: 2.1 MiB After Width: | Height: | Size: 641 KiB |
36
.github/workflows/docker-build.yaml
vendored
36
.github/workflows/docker-build.yaml
vendored
@@ -4,7 +4,6 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- canary
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
@@ -44,19 +43,6 @@ jobs:
|
||||
-t itzcrazykns1337/${IMAGE_NAME}:amd64 \
|
||||
--push .
|
||||
|
||||
- name: Build and push AMD64 Canary Docker image
|
||||
if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
|
||||
run: |
|
||||
DOCKERFILE=app.dockerfile
|
||||
IMAGE_NAME=perplexica
|
||||
docker buildx build --platform linux/amd64 \
|
||||
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
|
||||
--cache-to=type=inline \
|
||||
--provenance false \
|
||||
-f $DOCKERFILE \
|
||||
-t itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
|
||||
--push .
|
||||
|
||||
- name: Build and push AMD64 release Docker image
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
@@ -105,19 +91,6 @@ jobs:
|
||||
-t itzcrazykns1337/${IMAGE_NAME}:arm64 \
|
||||
--push .
|
||||
|
||||
- name: Build and push ARM64 Canary Docker image
|
||||
if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
|
||||
run: |
|
||||
DOCKERFILE=app.dockerfile
|
||||
IMAGE_NAME=perplexica
|
||||
docker buildx build --platform linux/arm64 \
|
||||
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
|
||||
--cache-to=type=inline \
|
||||
--provenance false \
|
||||
-f $DOCKERFILE \
|
||||
-t itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
|
||||
--push .
|
||||
|
||||
- name: Build and push ARM64 release Docker image
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
@@ -155,15 +128,6 @@ jobs:
|
||||
--amend itzcrazykns1337/${IMAGE_NAME}:arm64
|
||||
docker manifest push itzcrazykns1337/${IMAGE_NAME}:main
|
||||
|
||||
- name: Create and push multi-arch manifest for canary
|
||||
if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
|
||||
run: |
|
||||
IMAGE_NAME=perplexica
|
||||
docker manifest create itzcrazykns1337/${IMAGE_NAME}:canary \
|
||||
--amend itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
|
||||
--amend itzcrazykns1337/${IMAGE_NAME}:canary-arm64
|
||||
docker manifest push itzcrazykns1337/${IMAGE_NAME}:canary
|
||||
|
||||
- name: Create and push multi-arch manifest for releases
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
|
@@ -15,6 +15,9 @@ COPY drizzle ./drizzle
|
||||
RUN mkdir -p /home/perplexica/data
|
||||
RUN yarn build
|
||||
|
||||
RUN yarn add --dev @vercel/ncc
|
||||
RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator
|
||||
|
||||
FROM node:24.5.0-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
|
||||
@@ -27,6 +30,8 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
|
||||
COPY --from=builder /home/perplexica/.next/standalone ./
|
||||
COPY --from=builder /home/perplexica/data ./data
|
||||
COPY drizzle ./drizzle
|
||||
COPY --from=builder /home/perplexica/migrator/build ./build
|
||||
COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
|
||||
|
||||
RUN mkdir /home/perplexica/uploads
|
||||
|
||||
|
@@ -1,4 +1,6 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
node migrate.js
|
||||
|
||||
exec node server.js
|
12
package.json
12
package.json
@@ -1,18 +1,18 @@
|
||||
{
|
||||
"name": "perplexica-frontend",
|
||||
"version": "1.11.0-rc3",
|
||||
"version": "1.11.0-rc2",
|
||||
"license": "MIT",
|
||||
"author": "ItzCrazyKns",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"build": "npm run db:migrate && next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint",
|
||||
"format:write": "prettier . --write"
|
||||
"format:write": "prettier . --write",
|
||||
"db:migrate": "node ./src/lib/db/migrate.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@headlessui/react": "^2.2.0",
|
||||
"@headlessui/tailwindcss": "^0.2.2",
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"@icons-pack/react-simple-icons": "^12.3.0",
|
||||
"@langchain/anthropic": "^0.3.24",
|
||||
@@ -20,6 +20,7 @@
|
||||
"@langchain/core": "^0.3.66",
|
||||
"@langchain/google-genai": "^0.2.15",
|
||||
"@langchain/groq": "^0.2.3",
|
||||
"@langchain/langgraph": "^0.4.9",
|
||||
"@langchain/ollama": "^0.2.3",
|
||||
"@langchain/openai": "^0.6.2",
|
||||
"@langchain/textsplitters": "^0.1.0",
|
||||
@@ -29,8 +30,8 @@
|
||||
"better-sqlite3": "^11.9.1",
|
||||
"clsx": "^2.1.0",
|
||||
"compute-cosine-similarity": "^1.1.0",
|
||||
"compute-dot": "^1.1.0",
|
||||
"drizzle-orm": "^0.40.1",
|
||||
"framer-motion": "^12.23.24",
|
||||
"html-to-text": "^9.0.5",
|
||||
"jspdf": "^3.0.1",
|
||||
"langchain": "^0.3.30",
|
||||
@@ -65,7 +66,6 @@
|
||||
"postcss": "^8",
|
||||
"prettier": "^3.2.5",
|
||||
"tailwindcss": "^3.3.0",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
|
@@ -1,14 +1,23 @@
|
||||
import crypto from 'crypto';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { EventEmitter } from 'stream';
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '@/lib/providers';
|
||||
import db from '@/lib/db';
|
||||
import { chats, messages as messagesSchema } from '@/lib/db/schema';
|
||||
import { and, eq, gt } from 'drizzle-orm';
|
||||
import { getFileDetails } from '@/lib/utils/files';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '@/lib/config';
|
||||
import { searchHandlers } from '@/lib/search';
|
||||
import { z } from 'zod';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { ModelWithProvider } from '@/lib/models/types';
|
||||
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
@@ -19,30 +28,14 @@ const messageSchema = z.object({
|
||||
content: z.string().min(1, 'Message content is required'),
|
||||
});
|
||||
|
||||
const chatModelSchema: z.ZodType<ModelWithProvider> = z.object({
|
||||
providerId: z.string({
|
||||
errorMap: () => ({
|
||||
message: 'Chat model provider id must be provided',
|
||||
}),
|
||||
}),
|
||||
key: z.string({
|
||||
errorMap: () => ({
|
||||
message: 'Chat model key must be provided',
|
||||
}),
|
||||
}),
|
||||
const chatModelSchema = z.object({
|
||||
provider: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
});
|
||||
|
||||
const embeddingModelSchema: z.ZodType<ModelWithProvider> = z.object({
|
||||
providerId: z.string({
|
||||
errorMap: () => ({
|
||||
message: 'Embedding model provider id must be provided',
|
||||
}),
|
||||
}),
|
||||
key: z.string({
|
||||
errorMap: () => ({
|
||||
message: 'Embedding model key must be provided',
|
||||
}),
|
||||
}),
|
||||
const embeddingModelSchema = z.object({
|
||||
provider: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
});
|
||||
|
||||
const bodySchema = z.object({
|
||||
@@ -64,8 +57,8 @@ const bodySchema = z.object({
|
||||
.optional()
|
||||
.default([]),
|
||||
files: z.array(z.string()).optional().default([]),
|
||||
chatModel: chatModelSchema,
|
||||
embeddingModel: embeddingModelSchema,
|
||||
chatModel: chatModelSchema.optional().default({}),
|
||||
embeddingModel: embeddingModelSchema.optional().default({}),
|
||||
systemInstructions: z.string().nullable().optional().default(''),
|
||||
});
|
||||
|
||||
@@ -255,16 +248,56 @@ export const POST = async (req: Request) => {
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
const [llm, embedding] = await Promise.all([
|
||||
registry.loadChatModel(body.chatModel.providerId, body.chatModel.key),
|
||||
registry.loadEmbeddingModel(
|
||||
body.embeddingModel.providerId,
|
||||
body.embeddingModel.key,
|
||||
),
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
const chatModelProvider =
|
||||
chatModelProviders[
|
||||
body.chatModel?.provider || Object.keys(chatModelProviders)[0]
|
||||
];
|
||||
const chatModel =
|
||||
chatModelProvider[
|
||||
body.chatModel?.name || Object.keys(chatModelProvider)[0]
|
||||
];
|
||||
|
||||
const embeddingProvider =
|
||||
embeddingModelProviders[
|
||||
body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0]
|
||||
];
|
||||
const embeddingModel =
|
||||
embeddingProvider[
|
||||
body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
|
||||
];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
let embedding = embeddingModel.model;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
llm = new ChatOpenAI({
|
||||
apiKey: getCustomOpenaiApiKey(),
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (chatModelProvider && chatModel) {
|
||||
llm = chatModel.model;
|
||||
}
|
||||
|
||||
if (!llm) {
|
||||
return Response.json({ error: 'Invalid chat model' }, { status: 400 });
|
||||
}
|
||||
|
||||
if (!embedding) {
|
||||
return Response.json(
|
||||
{ error: 'Invalid embedding model' },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
const humanMessageId =
|
||||
message.messageId ?? crypto.randomBytes(7).toString('hex');
|
||||
|
||||
|
@@ -1,76 +1,134 @@
|
||||
import configManager from '@/lib/config';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
import { ConfigModelProvider } from '@/lib/config/types';
|
||||
import {
|
||||
getAnthropicApiKey,
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
getGeminiApiKey,
|
||||
getGroqApiKey,
|
||||
getOllamaApiEndpoint,
|
||||
getOpenaiApiKey,
|
||||
getDeepseekApiKey,
|
||||
getAimlApiKey,
|
||||
getLMStudioApiEndpoint,
|
||||
getLemonadeApiEndpoint,
|
||||
getLemonadeApiKey,
|
||||
updateConfig,
|
||||
getOllamaApiKey,
|
||||
} from '@/lib/config';
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '@/lib/providers';
|
||||
|
||||
type SaveConfigBody = {
|
||||
key: string;
|
||||
value: string;
|
||||
};
|
||||
|
||||
export const GET = async (req: NextRequest) => {
|
||||
export const GET = async (req: Request) => {
|
||||
try {
|
||||
const values = configManager.getCurrentConfig();
|
||||
const fields = configManager.getUIConfigSections();
|
||||
const config: Record<string, any> = {};
|
||||
|
||||
const modelRegistry = new ModelRegistry();
|
||||
const modelProviders = await modelRegistry.getActiveProviders();
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
values.modelProviders = values.modelProviders.map(
|
||||
(mp: ConfigModelProvider) => {
|
||||
const activeProvider = modelProviders.find((p) => p.id === mp.id);
|
||||
config['chatModelProviders'] = {};
|
||||
config['embeddingModelProviders'] = {};
|
||||
|
||||
for (const provider in chatModelProviders) {
|
||||
config['chatModelProviders'][provider] = Object.keys(
|
||||
chatModelProviders[provider],
|
||||
).map((model) => {
|
||||
return {
|
||||
...mp,
|
||||
chatModels: activeProvider?.chatModels ?? mp.chatModels,
|
||||
embeddingModels:
|
||||
activeProvider?.embeddingModels ?? mp.embeddingModels,
|
||||
name: model,
|
||||
displayName: chatModelProviders[provider][model].displayName,
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
return NextResponse.json({
|
||||
values,
|
||||
fields,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error('Error in getting config: ', err);
|
||||
return Response.json(
|
||||
{ message: 'An error has occurred.' },
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const POST = async (req: NextRequest) => {
|
||||
try {
|
||||
const body: SaveConfigBody = await req.json();
|
||||
|
||||
if (!body.key || !body.value) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Key and value are required.',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
configManager.updateConfig(body.key, body.value);
|
||||
for (const provider in embeddingModelProviders) {
|
||||
config['embeddingModelProviders'][provider] = Object.keys(
|
||||
embeddingModelProviders[provider],
|
||||
).map((model) => {
|
||||
return {
|
||||
name: model,
|
||||
displayName: embeddingModelProviders[provider][model].displayName,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Config updated successfully.',
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
config['openaiApiKey'] = getOpenaiApiKey();
|
||||
config['ollamaApiUrl'] = getOllamaApiEndpoint();
|
||||
config['ollamaApiKey'] = getOllamaApiKey();
|
||||
config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
|
||||
config['lemonadeApiUrl'] = getLemonadeApiEndpoint();
|
||||
config['lemonadeApiKey'] = getLemonadeApiKey();
|
||||
config['anthropicApiKey'] = getAnthropicApiKey();
|
||||
config['groqApiKey'] = getGroqApiKey();
|
||||
config['geminiApiKey'] = getGeminiApiKey();
|
||||
config['deepseekApiKey'] = getDeepseekApiKey();
|
||||
config['aimlApiKey'] = getAimlApiKey();
|
||||
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
|
||||
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
|
||||
config['customOpenaiModelName'] = getCustomOpenaiModelName();
|
||||
|
||||
return Response.json({ ...config }, { status: 200 });
|
||||
} catch (err) {
|
||||
console.error('Error in getting config: ', err);
|
||||
console.error('An error occurred while getting config:', err);
|
||||
return Response.json(
|
||||
{ message: 'An error has occurred.' },
|
||||
{ message: 'An error occurred while getting config' },
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const POST = async (req: Request) => {
|
||||
try {
|
||||
const config = await req.json();
|
||||
|
||||
const updatedConfig = {
|
||||
MODELS: {
|
||||
OPENAI: {
|
||||
API_KEY: config.openaiApiKey,
|
||||
},
|
||||
GROQ: {
|
||||
API_KEY: config.groqApiKey,
|
||||
},
|
||||
ANTHROPIC: {
|
||||
API_KEY: config.anthropicApiKey,
|
||||
},
|
||||
GEMINI: {
|
||||
API_KEY: config.geminiApiKey,
|
||||
},
|
||||
OLLAMA: {
|
||||
API_URL: config.ollamaApiUrl,
|
||||
API_KEY: config.ollamaApiKey,
|
||||
},
|
||||
DEEPSEEK: {
|
||||
API_KEY: config.deepseekApiKey,
|
||||
},
|
||||
AIMLAPI: {
|
||||
API_KEY: config.aimlApiKey,
|
||||
},
|
||||
LM_STUDIO: {
|
||||
API_URL: config.lmStudioApiUrl,
|
||||
},
|
||||
LEMONADE: {
|
||||
API_URL: config.lemonadeApiUrl,
|
||||
API_KEY: config.lemonadeApiKey,
|
||||
},
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: config.customOpenaiApiUrl,
|
||||
API_KEY: config.customOpenaiApiKey,
|
||||
MODEL_NAME: config.customOpenaiModelName,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
updateConfig(updatedConfig);
|
||||
|
||||
return Response.json({ message: 'Config updated' }, { status: 200 });
|
||||
} catch (err) {
|
||||
console.error('An error occurred while updating config:', err);
|
||||
return Response.json(
|
||||
{ message: 'An error occurred while updating config' },
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
|
@@ -1,12 +1,23 @@
|
||||
import handleImageSearch from '@/lib/chains/imageSearchAgent';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { ModelWithProvider } from '@/lib/models/types';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '@/lib/config';
|
||||
import { getAvailableChatModelProviders } from '@/lib/providers';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface ImageSearchBody {
|
||||
query: string;
|
||||
chatHistory: any[];
|
||||
chatModel: ModelWithProvider;
|
||||
chatModel?: ChatModel;
|
||||
}
|
||||
|
||||
export const POST = async (req: Request) => {
|
||||
@@ -23,12 +34,35 @@ export const POST = async (req: Request) => {
|
||||
})
|
||||
.filter((msg) => msg !== undefined) as BaseMessage[];
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
const chatModelProviders = await getAvailableChatModelProviders();
|
||||
|
||||
const llm = await registry.loadChatModel(
|
||||
body.chatModel.providerId,
|
||||
body.chatModel.key,
|
||||
);
|
||||
const chatModelProvider =
|
||||
chatModelProviders[
|
||||
body.chatModel?.provider || Object.keys(chatModelProviders)[0]
|
||||
];
|
||||
const chatModel =
|
||||
chatModelProvider[
|
||||
body.chatModel?.model || Object.keys(chatModelProvider)[0]
|
||||
];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
llm = new ChatOpenAI({
|
||||
apiKey: getCustomOpenaiApiKey(),
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (chatModelProvider && chatModel) {
|
||||
llm = chatModel.model;
|
||||
}
|
||||
|
||||
if (!llm) {
|
||||
return Response.json({ error: 'Invalid chat model' }, { status: 400 });
|
||||
}
|
||||
|
||||
const images = await handleImageSearch(
|
||||
{
|
||||
|
47
src/app/api/models/route.ts
Normal file
47
src/app/api/models/route.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '@/lib/providers';
|
||||
|
||||
export const GET = async (req: Request) => {
|
||||
try {
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
Object.keys(chatModelProviders).forEach((provider) => {
|
||||
Object.keys(chatModelProviders[provider]).forEach((model) => {
|
||||
delete (chatModelProviders[provider][model] as { model?: unknown })
|
||||
.model;
|
||||
});
|
||||
});
|
||||
|
||||
Object.keys(embeddingModelProviders).forEach((provider) => {
|
||||
Object.keys(embeddingModelProviders[provider]).forEach((model) => {
|
||||
delete (embeddingModelProviders[provider][model] as { model?: unknown })
|
||||
.model;
|
||||
});
|
||||
});
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
chatModelProviders,
|
||||
embeddingModelProviders,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('An error occurred while fetching models', err);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
@@ -1,94 +0,0 @@
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { Model } from '@/lib/models/types';
|
||||
import { NextRequest } from 'next/server';
|
||||
|
||||
export const POST = async (
|
||||
req: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> },
|
||||
) => {
|
||||
try {
|
||||
const { id } = await params;
|
||||
|
||||
const body: Partial<Model> & { type: 'embedding' | 'chat' } =
|
||||
await req.json();
|
||||
|
||||
if (!body.key || !body.name) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Key and name must be provided',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
await registry.addProviderModel(id, body.type, body);
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Model added successfully',
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('An error occurred while adding provider model', err);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const DELETE = async (
|
||||
req: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> },
|
||||
) => {
|
||||
try {
|
||||
const { id } = await params;
|
||||
|
||||
const body: { key: string; type: 'embedding' | 'chat' } = await req.json();
|
||||
|
||||
if (!body.key) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Key and name must be provided',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
await registry.removeProviderModel(id, body.type, body.key);
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Model added successfully',
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('An error occurred while deleting provider model', err);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
@@ -1,89 +0,0 @@
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { NextRequest } from 'next/server';
|
||||
|
||||
export const DELETE = async (
|
||||
req: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> },
|
||||
) => {
|
||||
try {
|
||||
const { id } = await params;
|
||||
|
||||
if (!id) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Provider ID is required.',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
await registry.removeProvider(id);
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Provider deleted successfully.',
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err: any) {
|
||||
console.error('An error occurred while deleting provider', err.message);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const PATCH = async (
|
||||
req: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> },
|
||||
) => {
|
||||
try {
|
||||
const body = await req.json();
|
||||
const { name, config } = body;
|
||||
const { id } = await params;
|
||||
|
||||
if (!id || !name || !config) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Missing required fields.',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
const updatedProvider = await registry.updateProvider(id, name, config);
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
provider: updatedProvider,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err: any) {
|
||||
console.error('An error occurred while updating provider', err.message);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
@@ -1,74 +0,0 @@
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { NextRequest } from 'next/server';
|
||||
|
||||
export const GET = async (req: Request) => {
|
||||
try {
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
const activeProviders = await registry.getActiveProviders();
|
||||
|
||||
const filteredProviders = activeProviders.filter((p) => {
|
||||
return !p.chatModels.some((m) => m.key === 'error');
|
||||
});
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
providers: filteredProviders,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('An error occurred while fetching providers', err);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const POST = async (req: NextRequest) => {
|
||||
try {
|
||||
const body = await req.json();
|
||||
const { type, name, config } = body;
|
||||
|
||||
if (!type || !name || !config) {
|
||||
return Response.json(
|
||||
{
|
||||
message: 'Missing required fields.',
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
const newProvider = await registry.addProvider(type, name, config);
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
provider: newProvider,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
},
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('An error occurred while creating provider', err);
|
||||
return Response.json(
|
||||
{
|
||||
message: 'An error has occurred.',
|
||||
},
|
||||
{
|
||||
status: 500,
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
@@ -1,14 +1,36 @@
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '@/lib/providers';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { MetaSearchAgentType } from '@/lib/search/metaSearchAgent';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '@/lib/config';
|
||||
import { searchHandlers } from '@/lib/search';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { ModelWithProvider } from '@/lib/models/types';
|
||||
|
||||
interface chatModel {
|
||||
provider: string;
|
||||
name: string;
|
||||
customOpenAIKey?: string;
|
||||
customOpenAIBaseURL?: string;
|
||||
}
|
||||
|
||||
interface embeddingModel {
|
||||
provider: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface ChatRequestBody {
|
||||
optimizationMode: 'speed' | 'balanced';
|
||||
focusMode: string;
|
||||
chatModel: ModelWithProvider;
|
||||
embeddingModel: ModelWithProvider;
|
||||
chatModel?: chatModel;
|
||||
embeddingModel?: embeddingModel;
|
||||
query: string;
|
||||
history: Array<[string, string]>;
|
||||
stream?: boolean;
|
||||
@@ -36,16 +58,60 @@ export const POST = async (req: Request) => {
|
||||
: new AIMessage({ content: msg[1] });
|
||||
});
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
|
||||
const [llm, embeddings] = await Promise.all([
|
||||
registry.loadChatModel(body.chatModel.providerId, body.chatModel.key),
|
||||
registry.loadEmbeddingModel(
|
||||
body.embeddingModel.providerId,
|
||||
body.embeddingModel.key,
|
||||
),
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
const chatModelProvider =
|
||||
body.chatModel?.provider || Object.keys(chatModelProviders)[0];
|
||||
const chatModel =
|
||||
body.chatModel?.name ||
|
||||
Object.keys(chatModelProviders[chatModelProvider])[0];
|
||||
|
||||
const embeddingModelProvider =
|
||||
body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0];
|
||||
const embeddingModel =
|
||||
body.embeddingModel?.name ||
|
||||
Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
let embeddings: Embeddings | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
llm = new ChatOpenAI({
|
||||
modelName: body.chatModel?.name || getCustomOpenaiModelName(),
|
||||
apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL:
|
||||
body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (
|
||||
chatModelProviders[chatModelProvider] &&
|
||||
chatModelProviders[chatModelProvider][chatModel]
|
||||
) {
|
||||
llm = chatModelProviders[chatModelProvider][chatModel]
|
||||
.model as unknown as BaseChatModel | undefined;
|
||||
}
|
||||
|
||||
if (
|
||||
embeddingModelProviders[embeddingModelProvider] &&
|
||||
embeddingModelProviders[embeddingModelProvider][embeddingModel]
|
||||
) {
|
||||
embeddings = embeddingModelProviders[embeddingModelProvider][
|
||||
embeddingModel
|
||||
].model as Embeddings | undefined;
|
||||
}
|
||||
|
||||
if (!llm || !embeddings) {
|
||||
return Response.json(
|
||||
{ message: 'Invalid model selected' },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode];
|
||||
|
||||
if (!searchHandler) {
|
||||
|
@@ -1,12 +1,22 @@
|
||||
import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { ModelWithProvider } from '@/lib/models/types';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '@/lib/config';
|
||||
import { getAvailableChatModelProviders } from '@/lib/providers';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface SuggestionsGenerationBody {
|
||||
chatHistory: any[];
|
||||
chatModel: ModelWithProvider;
|
||||
chatModel?: ChatModel;
|
||||
}
|
||||
|
||||
export const POST = async (req: Request) => {
|
||||
@@ -23,12 +33,35 @@ export const POST = async (req: Request) => {
|
||||
})
|
||||
.filter((msg) => msg !== undefined) as BaseMessage[];
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
const chatModelProviders = await getAvailableChatModelProviders();
|
||||
|
||||
const llm = await registry.loadChatModel(
|
||||
body.chatModel.providerId,
|
||||
body.chatModel.key,
|
||||
);
|
||||
const chatModelProvider =
|
||||
chatModelProviders[
|
||||
body.chatModel?.provider || Object.keys(chatModelProviders)[0]
|
||||
];
|
||||
const chatModel =
|
||||
chatModelProvider[
|
||||
body.chatModel?.model || Object.keys(chatModelProvider)[0]
|
||||
];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
llm = new ChatOpenAI({
|
||||
apiKey: getCustomOpenaiApiKey(),
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (chatModelProvider && chatModel) {
|
||||
llm = chatModel.model;
|
||||
}
|
||||
|
||||
if (!llm) {
|
||||
return Response.json({ error: 'Invalid chat model' }, { status: 400 });
|
||||
}
|
||||
|
||||
const suggestions = await generateSuggestions(
|
||||
{
|
||||
|
@@ -2,11 +2,11 @@ import { NextResponse } from 'next/server';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import crypto from 'crypto';
|
||||
import { getAvailableEmbeddingModelProviders } from '@/lib/providers';
|
||||
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
|
||||
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
|
||||
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
|
||||
import { Document } from 'langchain/document';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
|
||||
interface FileRes {
|
||||
fileName: string;
|
||||
@@ -30,8 +30,8 @@ export async function POST(req: Request) {
|
||||
const formData = await req.formData();
|
||||
|
||||
const files = formData.getAll('files') as File[];
|
||||
const embedding_model = formData.get('embedding_model_key') as string;
|
||||
const embedding_model_provider = formData.get('embedding_model_provider_id') as string;
|
||||
const embedding_model = formData.get('embedding_model');
|
||||
const embedding_model_provider = formData.get('embedding_model_provider');
|
||||
|
||||
if (!embedding_model || !embedding_model_provider) {
|
||||
return NextResponse.json(
|
||||
@@ -40,9 +40,20 @@ export async function POST(req: Request) {
|
||||
);
|
||||
}
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
const embeddingModels = await getAvailableEmbeddingModelProviders();
|
||||
const provider =
|
||||
embedding_model_provider ?? Object.keys(embeddingModels)[0];
|
||||
const embeddingModel =
|
||||
embedding_model ?? Object.keys(embeddingModels[provider as string])[0];
|
||||
|
||||
const model = await registry.loadEmbeddingModel(embedding_model_provider, embedding_model);
|
||||
let embeddingsModel =
|
||||
embeddingModels[provider as string]?.[embeddingModel as string]?.model;
|
||||
if (!embeddingsModel) {
|
||||
return NextResponse.json(
|
||||
{ message: 'Invalid embedding model selected' },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
const processedFiles: FileRes[] = [];
|
||||
|
||||
@@ -87,7 +98,7 @@ export async function POST(req: Request) {
|
||||
}),
|
||||
);
|
||||
|
||||
const embeddings = await model.embedDocuments(
|
||||
const embeddings = await embeddingsModel.embedDocuments(
|
||||
splitted.map((doc) => doc.pageContent),
|
||||
);
|
||||
const embeddingsDataPath = filePath.replace(
|
||||
|
@@ -1,12 +1,23 @@
|
||||
import handleVideoSearch from '@/lib/chains/videoSearchAgent';
|
||||
import ModelRegistry from '@/lib/models/registry';
|
||||
import { ModelWithProvider } from '@/lib/models/types';
|
||||
import {
|
||||
getCustomOpenaiApiKey,
|
||||
getCustomOpenaiApiUrl,
|
||||
getCustomOpenaiModelName,
|
||||
} from '@/lib/config';
|
||||
import { getAvailableChatModelProviders } from '@/lib/providers';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
|
||||
interface ChatModel {
|
||||
provider: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface VideoSearchBody {
|
||||
query: string;
|
||||
chatHistory: any[];
|
||||
chatModel: ModelWithProvider;
|
||||
chatModel?: ChatModel;
|
||||
}
|
||||
|
||||
export const POST = async (req: Request) => {
|
||||
@@ -23,12 +34,35 @@ export const POST = async (req: Request) => {
|
||||
})
|
||||
.filter((msg) => msg !== undefined) as BaseMessage[];
|
||||
|
||||
const registry = new ModelRegistry();
|
||||
const chatModelProviders = await getAvailableChatModelProviders();
|
||||
|
||||
const llm = await registry.loadChatModel(
|
||||
body.chatModel.providerId,
|
||||
body.chatModel.key,
|
||||
);
|
||||
const chatModelProvider =
|
||||
chatModelProviders[
|
||||
body.chatModel?.provider || Object.keys(chatModelProviders)[0]
|
||||
];
|
||||
const chatModel =
|
||||
chatModelProvider[
|
||||
body.chatModel?.model || Object.keys(chatModelProvider)[0]
|
||||
];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
|
||||
if (body.chatModel?.provider === 'custom_openai') {
|
||||
llm = new ChatOpenAI({
|
||||
apiKey: getCustomOpenaiApiKey(),
|
||||
modelName: getCustomOpenaiModelName(),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: getCustomOpenaiApiUrl(),
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
} else if (chatModelProvider && chatModel) {
|
||||
llm = chatModel.model;
|
||||
}
|
||||
|
||||
if (!llm) {
|
||||
return Response.json({ error: 'Invalid chat model' }, { status: 400 });
|
||||
}
|
||||
|
||||
const videos = await handleVideoSearch(
|
||||
{
|
||||
|
1007
src/app/settings/page.tsx
Normal file
1007
src/app/settings/page.tsx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,6 @@ import { Settings } from 'lucide-react';
|
||||
import Link from 'next/link';
|
||||
import NextError from 'next/error';
|
||||
import { useChat } from '@/lib/hooks/useChat';
|
||||
import Loader from './ui/Loader';
|
||||
|
||||
export interface BaseMessage {
|
||||
chatId: string;
|
||||
@@ -86,7 +85,22 @@ const ChatWindow = () => {
|
||||
)
|
||||
) : (
|
||||
<div className="flex flex-row items-center justify-center min-h-screen">
|
||||
<Loader />
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
|
||||
viewBox="0 0 100 101"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
<path
|
||||
d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
|
||||
fill="currentFill"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
@@ -5,8 +5,6 @@ import Focus from './MessageInputActions/Focus';
|
||||
import Optimization from './MessageInputActions/Optimization';
|
||||
import Attach from './MessageInputActions/Attach';
|
||||
import { useChat } from '@/lib/hooks/useChat';
|
||||
import AttachSmall from './MessageInputActions/AttachSmall';
|
||||
import ModelSelector from './MessageInputActions/ModelSelector';
|
||||
|
||||
const EmptyChatMessageInput = () => {
|
||||
const { sendMessage } = useChat();
|
||||
@@ -56,26 +54,25 @@ const EmptyChatMessageInput = () => {
|
||||
}}
|
||||
className="w-full"
|
||||
>
|
||||
<div className="flex flex-col bg-light-secondary dark:bg-dark-secondary px-3 pt-5 pb-3 rounded-2xl w-full border border-light-200 dark:border-dark-200 shadow-sm shadow-light-200/10 dark:shadow-black/20 transition-all duration-200 focus-within:border-light-300 dark:focus-within:border-dark-300">
|
||||
<div className="flex flex-col bg-light-secondary dark:bg-dark-secondary px-5 pt-5 pb-2 rounded-2xl w-full border border-light-200 dark:border-dark-200 shadow-sm shadow-light-200/10 dark:shadow-black/20 transition-all duration-200 focus-within:border-light-300 dark:focus-within:border-dark-300">
|
||||
<TextareaAutosize
|
||||
ref={inputRef}
|
||||
value={message}
|
||||
onChange={(e) => setMessage(e.target.value)}
|
||||
minRows={2}
|
||||
className="px-2 bg-transparent placeholder:text-[15px] placeholder:text-black/50 dark:placeholder:text-white/50 text-sm text-black dark:text-white resize-none focus:outline-none w-full max-h-24 lg:max-h-36 xl:max-h-48"
|
||||
className="bg-transparent placeholder:text-black/50 dark:placeholder:text-white/50 text-sm text-black dark:text-white resize-none focus:outline-none w-full max-h-24 lg:max-h-36 xl:max-h-48"
|
||||
placeholder="Ask anything..."
|
||||
/>
|
||||
<div className="flex flex-row items-center justify-between mt-4">
|
||||
<Optimization />
|
||||
<div className="flex flex-row items-center space-x-2">
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
<ModelSelector />
|
||||
<Focus />
|
||||
<Attach />
|
||||
</div>
|
||||
<div className="flex flex-row items-center space-x-2 lg:space-x-4">
|
||||
<Focus />
|
||||
<Attach showText />
|
||||
</div>
|
||||
<div className="flex flex-row items-center space-x-1 sm:space-x-4">
|
||||
<Optimization />
|
||||
<button
|
||||
disabled={message.trim().length === 0}
|
||||
className="bg-sky-500 text-white disabled:text-black/50 dark:disabled:text-white/50 disabled:bg-[#e0e0dc] dark:disabled:bg-[#ececec21] hover:bg-opacity-85 transition duration-100 rounded-full p-2"
|
||||
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 disabled:bg-[#e0e0dc] dark:disabled:bg-[#ececec21] hover:bg-opacity-85 transition duration-100 rounded-full p-2"
|
||||
>
|
||||
<ArrowRight className="bg-background" size={17} />
|
||||
</button>
|
||||
|
@@ -5,19 +5,11 @@ import {
|
||||
PopoverPanel,
|
||||
Transition,
|
||||
} from '@headlessui/react';
|
||||
import {
|
||||
CopyPlus,
|
||||
File,
|
||||
Link,
|
||||
LoaderCircle,
|
||||
Paperclip,
|
||||
Plus,
|
||||
Trash,
|
||||
} from 'lucide-react';
|
||||
import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
|
||||
import { Fragment, useRef, useState } from 'react';
|
||||
import { useChat } from '@/lib/hooks/useChat';
|
||||
|
||||
const Attach = () => {
|
||||
const Attach = ({ showText }: { showText?: boolean }) => {
|
||||
const { files, setFiles, setFileIds, fileIds } = useChat();
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
@@ -32,12 +24,12 @@ const Attach = () => {
|
||||
}
|
||||
|
||||
const embeddingModelProvider = localStorage.getItem(
|
||||
'embeddingModelProviderId',
|
||||
'embeddingModelProvider',
|
||||
);
|
||||
const embeddingModel = localStorage.getItem('embeddingModelKey');
|
||||
const embeddingModel = localStorage.getItem('embeddingModel');
|
||||
|
||||
data.append('embedding_model_provider_id', embeddingModelProvider!);
|
||||
data.append('embedding_model_key', embeddingModel!);
|
||||
data.append('embedding_model_provider', embeddingModelProvider!);
|
||||
data.append('embedding_model', embeddingModel!);
|
||||
|
||||
const res = await fetch(`/api/uploads`, {
|
||||
method: 'POST',
|
||||
@@ -52,16 +44,42 @@ const Attach = () => {
|
||||
};
|
||||
|
||||
return loading ? (
|
||||
<div className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none text-black/50 dark:text-white/50 transition duration-200">
|
||||
<LoaderCircle size={16} className="text-sky-400 animate-spin" />
|
||||
<div className="flex flex-row items-center justify-between space-x-1">
|
||||
<LoaderCircle size={18} className="text-sky-400 animate-spin" />
|
||||
<p className="text-sky-400 inline whitespace-nowrap text-xs font-medium">
|
||||
Uploading..
|
||||
</p>
|
||||
</div>
|
||||
) : files.length > 0 ? (
|
||||
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
|
||||
<PopoverButton
|
||||
type="button"
|
||||
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||
className={cn(
|
||||
'flex flex-row items-center justify-between space-x-1 p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white',
|
||||
files.length > 0 ? '-ml-2 lg:-ml-3' : '',
|
||||
)}
|
||||
>
|
||||
<File size={16} className="text-sky-400" />
|
||||
{files.length > 1 && (
|
||||
<>
|
||||
<File size={19} className="text-sky-400" />
|
||||
<p className="text-sky-400 inline whitespace-nowrap text-xs font-medium">
|
||||
{files.length} files
|
||||
</p>
|
||||
</>
|
||||
)}
|
||||
|
||||
{files.length === 1 && (
|
||||
<>
|
||||
<File size={18} className="text-sky-400" />
|
||||
<p className="text-sky-400 text-xs font-medium">
|
||||
{files[0].fileName.length > 10
|
||||
? files[0].fileName.replace(/\.\w+$/, '').substring(0, 3) +
|
||||
'...' +
|
||||
files[0].fileExtension
|
||||
: files[0].fileName}
|
||||
</p>
|
||||
</>
|
||||
)}
|
||||
</PopoverButton>
|
||||
<Transition
|
||||
as={Fragment}
|
||||
@@ -82,7 +100,7 @@ const Attach = () => {
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => fileInputRef.current.click()}
|
||||
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
|
||||
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
|
||||
>
|
||||
<input
|
||||
type="file"
|
||||
@@ -92,7 +110,7 @@ const Attach = () => {
|
||||
multiple
|
||||
hidden
|
||||
/>
|
||||
<Plus size={16} />
|
||||
<Plus size={18} />
|
||||
<p className="text-xs">Add</p>
|
||||
</button>
|
||||
<button
|
||||
@@ -100,7 +118,7 @@ const Attach = () => {
|
||||
setFiles([]);
|
||||
setFileIds([]);
|
||||
}}
|
||||
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200 focus:outline-none"
|
||||
className="flex flex-row items-center space-x-1 text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white transition duration-200"
|
||||
>
|
||||
<Trash size={14} />
|
||||
<p className="text-xs">Clear</p>
|
||||
@@ -139,7 +157,8 @@ const Attach = () => {
|
||||
type="button"
|
||||
onClick={() => fileInputRef.current.click()}
|
||||
className={cn(
|
||||
'flex items-center justify-center active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white',
|
||||
'flex flex-row items-center space-x-1 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white',
|
||||
showText ? '' : 'p-2',
|
||||
)}
|
||||
>
|
||||
<input
|
||||
@@ -150,7 +169,8 @@ const Attach = () => {
|
||||
multiple
|
||||
hidden
|
||||
/>
|
||||
<Paperclip size={16} />
|
||||
<CopyPlus size={showText ? 18 : undefined} />
|
||||
{showText && <p className="text-xs font-medium pl-[1px]">Attach</p>}
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
@@ -5,14 +5,7 @@ import {
|
||||
PopoverPanel,
|
||||
Transition,
|
||||
} from '@headlessui/react';
|
||||
import {
|
||||
CopyPlus,
|
||||
File,
|
||||
LoaderCircle,
|
||||
Paperclip,
|
||||
Plus,
|
||||
Trash,
|
||||
} from 'lucide-react';
|
||||
import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
|
||||
import { Fragment, useRef, useState } from 'react';
|
||||
import { File as FileType } from '../ChatWindow';
|
||||
import { useChat } from '@/lib/hooks/useChat';
|
||||
@@ -32,12 +25,12 @@ const AttachSmall = () => {
|
||||
}
|
||||
|
||||
const embeddingModelProvider = localStorage.getItem(
|
||||
'embeddingModelProviderId',
|
||||
'embeddingModelProvider',
|
||||
);
|
||||
const embeddingModel = localStorage.getItem('embeddingModelKey');
|
||||
const embeddingModel = localStorage.getItem('embeddingModel');
|
||||
|
||||
data.append('embedding_model_provider_id', embeddingModelProvider!);
|
||||
data.append('embedding_model_key', embeddingModel!);
|
||||
data.append('embedding_model_provider', embeddingModelProvider!);
|
||||
data.append('embedding_model', embeddingModel!);
|
||||
|
||||
const res = await fetch(`/api/uploads`, {
|
||||
method: 'POST',
|
||||
@@ -52,7 +45,7 @@ const AttachSmall = () => {
|
||||
};
|
||||
|
||||
return loading ? (
|
||||
<div className="flex flex-row items-center justify-between space-x-1 p-1 ">
|
||||
<div className="flex flex-row items-center justify-between space-x-1 p-1">
|
||||
<LoaderCircle size={20} className="text-sky-400 animate-spin" />
|
||||
</div>
|
||||
) : files.length > 0 ? (
|
||||
@@ -148,7 +141,7 @@ const AttachSmall = () => {
|
||||
multiple
|
||||
hidden
|
||||
/>
|
||||
<Paperclip size={16} />
|
||||
<CopyPlus size={20} />
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
@@ -22,13 +22,13 @@ const focusModes = [
|
||||
key: 'webSearch',
|
||||
title: 'All',
|
||||
description: 'Searches across all of the internet',
|
||||
icon: <Globe size={16} />,
|
||||
icon: <Globe size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'academicSearch',
|
||||
title: 'Academic',
|
||||
description: 'Search in published academic papers',
|
||||
icon: <SwatchBook size={16} />,
|
||||
icon: <SwatchBook size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'writingAssistant',
|
||||
@@ -40,19 +40,19 @@ const focusModes = [
|
||||
key: 'wolframAlphaSearch',
|
||||
title: 'Wolfram Alpha',
|
||||
description: 'Computational knowledge engine',
|
||||
icon: <BadgePercent size={16} />,
|
||||
icon: <BadgePercent size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'youtubeSearch',
|
||||
title: 'Youtube',
|
||||
description: 'Search and watch videos',
|
||||
icon: <SiYoutube className="h-[16px] w-auto mr-0.5" />,
|
||||
icon: <SiYoutube className="h-5 w-auto mr-0.5" />,
|
||||
},
|
||||
{
|
||||
key: 'redditSearch',
|
||||
title: 'Reddit',
|
||||
description: 'Search for discussions and opinions',
|
||||
icon: <SiReddit className="h-[16px] w-auto mr-0.5" />,
|
||||
icon: <SiReddit className="h-5 w-auto mr-0.5" />,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -60,18 +60,23 @@ const Focus = () => {
|
||||
const { focusMode, setFocusMode } = useChat();
|
||||
|
||||
return (
|
||||
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
|
||||
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
|
||||
<PopoverButton
|
||||
type="button"
|
||||
className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||
className=" text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||
>
|
||||
{focusMode !== 'webSearch' ? (
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
{focusModes.find((mode) => mode.key === focusMode)?.icon}
|
||||
<p className="text-xs font-medium hidden lg:block">
|
||||
{focusModes.find((mode) => mode.key === focusMode)?.title}
|
||||
</p>
|
||||
<ChevronDown size={20} className="-translate-x-1" />
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
<Globe size={16} />
|
||||
<ScanEye size={20} />
|
||||
<p className="text-xs font-medium hidden lg:block">Focus</p>
|
||||
</div>
|
||||
)}
|
||||
</PopoverButton>
|
||||
@@ -84,14 +89,14 @@ const Focus = () => {
|
||||
leaveFrom="opacity-100 translate-y-0"
|
||||
leaveTo="opacity-0 translate-y-1"
|
||||
>
|
||||
<PopoverPanel className="absolute z-10 w-64 md:w-[500px] -right-4">
|
||||
<PopoverPanel className="absolute z-10 w-64 md:w-[500px] left-0">
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
|
||||
{focusModes.map((mode, i) => (
|
||||
<PopoverButton
|
||||
onClick={() => setFocusMode(mode.key)}
|
||||
key={i}
|
||||
className={cn(
|
||||
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition focus:outline-none',
|
||||
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition',
|
||||
focusMode === mode.key
|
||||
? 'bg-light-secondary dark:bg-dark-secondary'
|
||||
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
|
||||
|
@@ -1,198 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { Cpu, Loader2, Search } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
Popover,
|
||||
PopoverButton,
|
||||
PopoverPanel,
|
||||
Transition,
|
||||
} from '@headlessui/react';
|
||||
import { Fragment, useEffect, useState } from 'react';
|
||||
import { MinimalProvider } from '@/lib/models/types';
|
||||
|
||||
const ModelSelector = () => {
|
||||
const [providers, setProviders] = useState<MinimalProvider[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [selectedModel, setSelectedModel] = useState<{
|
||||
providerId: string;
|
||||
modelKey: string;
|
||||
} | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const loadProviders = async () => {
|
||||
try {
|
||||
setIsLoading(true);
|
||||
        const res = await fetch('/api/providers');

        if (!res.ok) {
          throw new Error('Failed to fetch providers');
        }

        const data = await res.json();
        setProviders(data.providers || []);

        const savedProviderId = localStorage.getItem('chatModelProviderId');
        const savedModelKey = localStorage.getItem('chatModelKey');

        if (savedProviderId && savedModelKey) {
          setSelectedModel({
            providerId: savedProviderId,
            modelKey: savedModelKey,
          });
        } else if (data.providers && data.providers.length > 0) {
          const firstProvider = data.providers.find(
            (p: MinimalProvider) => p.chatModels.length > 0,
          );
          if (firstProvider && firstProvider.chatModels[0]) {
            setSelectedModel({
              providerId: firstProvider.id,
              modelKey: firstProvider.chatModels[0].key,
            });
          }
        }
      } catch (error) {
        console.error('Error loading providers:', error);
      } finally {
        setIsLoading(false);
      }
    };

    loadProviders();
  }, []);

  const handleModelSelect = (providerId: string, modelKey: string) => {
    setSelectedModel({ providerId, modelKey });
    localStorage.setItem('chatModelProviderId', providerId);
    localStorage.setItem('chatModelKey', modelKey);
  };

  const filteredProviders = providers
    .map((provider) => ({
      ...provider,
      chatModels: provider.chatModels.filter(
        (model) =>
          model.name.toLowerCase().includes(searchQuery.toLowerCase()) ||
          provider.name.toLowerCase().includes(searchQuery.toLowerCase()),
      ),
    }))
    .filter((provider) => provider.chatModels.length > 0);

  return (
    <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
      <PopoverButton
        type="button"
        className="active:border-none hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg focus:outline-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
      >
        <Cpu size={16} className="text-sky-500" />
      </PopoverButton>
      <Transition
        as={Fragment}
        enter="transition ease-out duration-100"
        enterFrom="opacity-0 translate-y-1"
        enterTo="opacity-100 translate-y-0"
        leave="transition ease-in duration-100"
        leaveFrom="opacity-100 translate-y-0"
        leaveTo="opacity-0 translate-y-1"
      >
        <PopoverPanel className="absolute z-10 w-[230px] sm:w-[270px] md:w-[300px] -right-4">
          <div className="bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
            <div className="p-4 border-b border-light-200 dark:border-dark-200">
              <div className="relative">
                <Search
                  size={16}
                  className="absolute left-3 top-1/2 -translate-y-1/2 text-black/40 dark:text-white/40"
                />
                <input
                  type="text"
                  placeholder="Search models..."
                  value={searchQuery}
                  onChange={(e) => setSearchQuery(e.target.value)}
                  className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg text-sm text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
                />
              </div>
            </div>

            <div className="max-h-[320px] overflow-y-auto">
              {isLoading ? (
                <div className="flex items-center justify-center py-16">
                  <Loader2
                    className="animate-spin text-black/40 dark:text-white/40"
                    size={24}
                  />
                </div>
              ) : filteredProviders.length === 0 ? (
                <div className="text-center py-16 px-4 text-black/60 dark:text-white/60 text-sm">
                  {searchQuery
                    ? 'No models found'
                    : 'No chat models configured'}
                </div>
              ) : (
                <div className="flex flex-col">
                  {filteredProviders.map((provider, providerIndex) => (
                    <div key={provider.id}>
                      <div className="px-4 py-2.5 sticky top-0 bg-light-primary dark:bg-dark-primary border-b border-light-200/50 dark:border-dark-200/50">
                        <p className="text-xs text-black/50 dark:text-white/50 uppercase tracking-wider">
                          {provider.name}
                        </p>
                      </div>

                      <div className="flex flex-col px-2 py-2 space-y-0.5">
                        {provider.chatModels.map((model) => (
                          <PopoverButton
                            key={model.key}
                            onClick={() =>
                              handleModelSelect(provider.id, model.key)
                            }
                            className={cn(
                              'px-3 py-2 flex items-center justify-between text-start duration-200 cursor-pointer transition rounded-lg group',
                              selectedModel?.providerId === provider.id &&
                                selectedModel?.modelKey === model.key
                                ? 'bg-light-secondary dark:bg-dark-secondary'
                                : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
                            )}
                          >
                            <div className="flex items-center space-x-2.5 min-w-0 flex-1">
                              <Cpu
                                size={15}
                                className={cn(
                                  'shrink-0',
                                  selectedModel?.providerId === provider.id &&
                                    selectedModel?.modelKey === model.key
                                    ? 'text-sky-500'
                                    : 'text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70',
                                )}
                              />
                              <p
                                className={cn(
                                  'text-sm truncate',
                                  selectedModel?.providerId === provider.id &&
                                    selectedModel?.modelKey === model.key
                                    ? 'text-sky-500 font-medium'
                                    : 'text-black/70 dark:text-white/70 group-hover:text-black dark:group-hover:text-white',
                                )}
                              >
                                {model.name}
                              </p>
                            </div>
                          </PopoverButton>
                        ))}
                      </div>

                      {providerIndex < filteredProviders.length - 1 && (
                        <div className="h-px bg-light-200 dark:bg-dark-200" />
                      )}
                    </div>
                  ))}
                </div>
              )}
            </div>
          </div>
        </PopoverPanel>
      </Transition>
    </Popover>
  );
};

export default ModelSelector;

@@ -14,13 +14,13 @@ const OptimizationModes = [
    key: 'speed',
    title: 'Speed',
    description: 'Prioritize speed and get the quickest possible answer.',
    icon: <Zap size={16} className="text-[#FF9800]" />,
    icon: <Zap size={20} className="text-[#FF9800]" />,
  },
  {
    key: 'balanced',
    title: 'Balanced',
    description: 'Find the right balance between speed and accuracy',
    icon: <Sliders size={16} className="text-[#4CAF50]" />,
    icon: <Sliders size={20} className="text-[#4CAF50]" />,
  },
  {
    key: 'quality',
@@ -40,64 +40,60 @@ const Optimization = () => {

  return (
    <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
      {({ open }) => (
        <>
          <PopoverButton
            type="button"
            className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white focus:outline-none"
          >
            <div className="flex flex-row items-center space-x-1">
              {
                OptimizationModes.find((mode) => mode.key === optimizationMode)
                  ?.icon
              }
              <ChevronDown
                size={16}
      <PopoverButton
        type="button"
        className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
      >
        <div className="flex flex-row items-center space-x-1">
          {
            OptimizationModes.find((mode) => mode.key === optimizationMode)
              ?.icon
          }
          <p className="text-xs font-medium">
            {
              OptimizationModes.find((mode) => mode.key === optimizationMode)
                ?.title
            }
          </p>
          <ChevronDown size={20} />
        </div>
      </PopoverButton>
      <Transition
        as={Fragment}
        enter="transition ease-out duration-150"
        enterFrom="opacity-0 translate-y-1"
        enterTo="opacity-100 translate-y-0"
        leave="transition ease-in duration-150"
        leaveFrom="opacity-100 translate-y-0"
        leaveTo="opacity-0 translate-y-1"
      >
        <PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0">
          <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
            {OptimizationModes.map((mode, i) => (
              <PopoverButton
                onClick={() => setOptimizationMode(mode.key)}
                key={i}
                disabled={mode.key === 'quality'}
                className={cn(
                  open ? 'rotate-180' : 'rotate-0',
                  'transition duration:200',
                  'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition',
                  optimizationMode === mode.key
                    ? 'bg-light-secondary dark:bg-dark-secondary'
                    : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
                  mode.key === 'quality' && 'opacity-50 cursor-not-allowed',
                )}
              />
            </div>
          </PopoverButton>
          <Transition
            as={Fragment}
            enter="transition ease-out duration-150"
            enterFrom="opacity-0 translate-y-1"
            enterTo="opacity-100 translate-y-0"
            leave="transition ease-in duration-150"
            leaveFrom="opacity-100 translate-y-0"
            leaveTo="opacity-0 translate-y-1"
          >
            <PopoverPanel className="absolute z-10 w-64 md:w-[250px] left-0">
              <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
                {OptimizationModes.map((mode, i) => (
                  <PopoverButton
                    onClick={() => setOptimizationMode(mode.key)}
                    key={i}
                    disabled={mode.key === 'quality'}
                    className={cn(
                      'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition focus:outline-none',
                      optimizationMode === mode.key
                        ? 'bg-light-secondary dark:bg-dark-secondary'
                        : 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
                      mode.key === 'quality' && 'opacity-50 cursor-not-allowed',
                    )}
                  >
                    <div className="flex flex-row items-center space-x-1 text-black dark:text-white">
                      {mode.icon}
                      <p className="text-sm font-medium">{mode.title}</p>
                    </div>
                    <p className="text-black/70 dark:text-white/70 text-xs">
                      {mode.description}
                    </p>
                  </PopoverButton>
                ))}
              </div>
            </PopoverPanel>
          </Transition>
        </>
      )}
                  >
                    <div className="flex flex-row items-center space-x-1 text-black dark:text-white">
                      {mode.icon}
                      <p className="text-sm font-medium">{mode.title}</p>
                    </div>
                    <p className="text-black/70 dark:text-white/70 text-xs">
                      {mode.description}
                    </p>
                  </PopoverButton>
                ))}
              </div>
            </PopoverPanel>
          </Transition>
    </Popover>
  );
};

@@ -33,10 +33,11 @@ const SearchImages = ({
        onClick={async () => {
          setLoading(true);

          const chatModelProvider = localStorage.getItem(
            'chatModelProviderId',
          );
          const chatModel = localStorage.getItem('chatModelKey');
          const chatModelProvider = localStorage.getItem('chatModelProvider');
          const chatModel = localStorage.getItem('chatModel');

          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
          const customOpenAIKey = localStorage.getItem('openAIApiKey');

          const res = await fetch(`/api/images`, {
            method: 'POST',
@@ -47,8 +48,12 @@ const SearchImages = ({
              query: query,
              chatHistory: chatHistory,
              chatModel: {
                providerId: chatModelProvider,
                key: chatModel,
                provider: chatModelProvider,
                model: chatModel,
                ...(chatModelProvider === 'custom_openai' && {
                  customOpenAIBaseURL: customOpenAIBaseURL,
                  customOpenAIKey: customOpenAIKey,
                }),
              },
            }),
          });

@@ -48,10 +48,11 @@ const Searchvideos = ({
        onClick={async () => {
          setLoading(true);

          const chatModelProvider = localStorage.getItem(
            'chatModelProviderId',
          );
          const chatModel = localStorage.getItem('chatModelKey');
          const chatModelProvider = localStorage.getItem('chatModelProvider');
          const chatModel = localStorage.getItem('chatModel');

          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
          const customOpenAIKey = localStorage.getItem('openAIApiKey');

          const res = await fetch(`/api/videos`, {
            method: 'POST',
@@ -62,8 +63,12 @@ const Searchvideos = ({
              query: query,
              chatHistory: chatHistory,
              chatModel: {
                providerId: chatModelProvider,
                key: chatModel,
                provider: chatModelProvider,
                model: chatModel,
                ...(chatModelProvider === 'custom_openai' && {
                  customOpenAIBaseURL: customOpenAIBaseURL,
                  customOpenAIKey: customOpenAIKey,
                }),
              },
            }),
          });

@@ -1,29 +0,0 @@
import { UIConfigField } from '@/lib/config/types';
import SettingsField from '../SettingsField';

const General = ({
  fields,
  values,
}: {
  fields: UIConfigField[];
  values: Record<string, any>;
}) => {
  return (
    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
      {fields.map((field) => (
        <SettingsField
          key={field.key}
          field={field}
          value={
            (field.scope === 'client'
              ? localStorage.getItem(field.key)
              : values[field.key]) ?? field.default
          }
          dataAdd="general"
        />
      ))}
    </div>
  );
};

export default General;
|
@@ -1,163 +0,0 @@
|
||||
import { Dialog, DialogPanel } from '@headlessui/react';
|
||||
import { Loader2, Plus } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { AnimatePresence, motion } from 'framer-motion';
|
||||
import { ConfigModelProvider } from '@/lib/config/types';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
const AddModel = ({
|
||||
providerId,
|
||||
setProviders,
|
||||
type,
|
||||
}: {
|
||||
providerId: string;
|
||||
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
|
||||
type: 'chat' | 'embedding';
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [modelName, setModelName] = useState('');
|
||||
const [modelKey, setModelKey] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch(`/api/providers/${providerId}/models`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: modelName,
|
||||
key: modelKey,
|
||||
type: type,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error('Failed to add model');
|
||||
}
|
||||
|
||||
setProviders((prev) =>
|
||||
prev.map((provider) => {
|
||||
if (provider.id === providerId) {
|
||||
const newModel = { name: modelName, key: modelKey };
|
||||
return {
|
||||
...provider,
|
||||
chatModels:
|
||||
type === 'chat'
|
||||
? [...provider.chatModels, newModel]
|
||||
: provider.chatModels,
|
||||
embeddingModels:
|
||||
type === 'embedding'
|
||||
? [...provider.embeddingModels, newModel]
|
||||
: provider.embeddingModels,
|
||||
};
|
||||
}
|
||||
return provider;
|
||||
}),
|
||||
);
|
||||
|
||||
toast.success('Model added successfully.');
|
||||
setModelName('');
|
||||
setModelKey('');
|
||||
setOpen(false);
|
||||
} catch (error) {
|
||||
console.error('Error adding model:', error);
|
||||
toast.error('Failed to add model.');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
onClick={() => setOpen(true)}
|
||||
className="text-xs text-black/70 dark:text-white/70 hover:text-black hover:dark:text-white flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
|
||||
>
|
||||
<Plus size={12} />
|
||||
<span>Add</span>
|
||||
</button>
|
||||
<AnimatePresence>
|
||||
{open && (
|
||||
<Dialog
|
||||
static
|
||||
open={open}
|
||||
onClose={() => setOpen(false)}
|
||||
className="relative z-[60]"
|
||||
>
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm"
|
||||
>
|
||||
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
|
||||
<div className="px-6 pt-6 pb-4">
|
||||
<h3 className="text-black/90 dark:text-white/90 font-medium">
|
||||
Add new {type === 'chat' ? 'chat' : 'embedding'} model
|
||||
</h3>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="flex-1 overflow-y-auto px-6 py-4">
|
||||
<form
|
||||
onSubmit={handleSubmit}
|
||||
className="flex flex-col h-full"
|
||||
>
|
||||
<div className="flex flex-col space-y-4 flex-1">
|
||||
<div className="flex flex-col items-start space-y-2">
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
Model name*
|
||||
</label>
|
||||
<input
|
||||
value={modelName}
|
||||
onChange={(e) => setModelName(e.target.value)}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder="e.g., GPT-4"
|
||||
type="text"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col items-start space-y-2">
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
Model key*
|
||||
</label>
|
||||
<input
|
||||
value={modelKey}
|
||||
onChange={(e) => setModelKey(e.target.value)}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder="e.g., gpt-4"
|
||||
type="text"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200 -mx-6 my-4" />
|
||||
<div className="flex justify-end">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={loading}
|
||||
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
|
||||
>
|
||||
{loading ? (
|
||||
<Loader2 className="animate-spin" size={16} />
|
||||
) : (
|
||||
'Add Model'
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</DialogPanel>
|
||||
</motion.div>
|
||||
</Dialog>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default AddModel;
|
@@ -1,216 +0,0 @@
|
||||
import {
|
||||
Description,
|
||||
Dialog,
|
||||
DialogPanel,
|
||||
DialogTitle,
|
||||
} from '@headlessui/react';
|
||||
import { Loader2, Plus } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { AnimatePresence, motion } from 'framer-motion';
|
||||
import {
|
||||
ConfigModelProvider,
|
||||
ModelProviderUISection,
|
||||
StringUIConfigField,
|
||||
UIConfigField,
|
||||
} from '@/lib/config/types';
|
||||
import Select from '@/components/ui/Select';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
const AddProvider = ({
|
||||
modelProviders,
|
||||
setProviders,
|
||||
}: {
|
||||
modelProviders: ModelProviderUISection[];
|
||||
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [selectedProvider, setSelectedProvider] = useState<null | string>(
|
||||
modelProviders[0]?.key || null,
|
||||
);
|
||||
const [config, setConfig] = useState<Record<string, any>>({});
|
||||
const [name, setName] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const providerConfigMap = useMemo(() => {
|
||||
const map: Record<string, { name: string; fields: UIConfigField[] }> = {};
|
||||
|
||||
modelProviders.forEach((p) => {
|
||||
map[p.key] = {
|
||||
name: p.name,
|
||||
fields: p.fields,
|
||||
};
|
||||
});
|
||||
|
||||
return map;
|
||||
}, [modelProviders]);
|
||||
|
||||
const selectedProviderFields = useMemo(() => {
|
||||
if (!selectedProvider) return [];
|
||||
const providerFields = providerConfigMap[selectedProvider]?.fields || [];
|
||||
const config: Record<string, any> = {};
|
||||
|
||||
providerFields.forEach((field) => {
|
||||
config[field.key] = field.default || '';
|
||||
});
|
||||
|
||||
setConfig(config);
|
||||
|
||||
return providerFields;
|
||||
}, [selectedProvider, providerConfigMap]);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch('/api/providers', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
type: selectedProvider,
|
||||
name: name,
|
||||
config: config,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error('Failed to add provider');
|
||||
}
|
||||
|
||||
const data: ConfigModelProvider = (await res.json()).provider;
|
||||
|
||||
setProviders((prev) => [...prev, data]);
|
||||
|
||||
toast.success('Provider added successfully.');
|
||||
} catch (error) {
|
||||
console.error('Error adding provider:', error);
|
||||
toast.error('Failed to add provider.');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
setOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
onClick={() => setOpen(true)}
|
||||
className="px-4 py-2 rounded-lg text-sm border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
|
||||
>
|
||||
<Plus size={16} />
|
||||
<span>Add Provider</span>
|
||||
</button>
|
||||
<AnimatePresence>
|
||||
{open && (
|
||||
<Dialog
|
||||
static
|
||||
open={open}
|
||||
onClose={() => setOpen(false)}
|
||||
className="relative z-[60]"
|
||||
>
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm"
|
||||
>
|
||||
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
|
||||
<form onSubmit={handleSubmit} className="flex flex-col flex-1">
|
||||
<div className="px-6 pt-6 pb-4">
|
||||
<h3 className="text-black/90 dark:text-white/90 font-medium">
|
||||
Add new provider
|
||||
</h3>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="flex-1 overflow-y-auto px-6 py-4">
|
||||
<div className="flex flex-col space-y-4">
|
||||
<div className="flex flex-col items-start space-y-2">
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
Select provider type
|
||||
</label>
|
||||
<Select
|
||||
value={selectedProvider ?? ''}
|
||||
onChange={(e) => setSelectedProvider(e.target.value)}
|
||||
options={Object.entries(providerConfigMap).map(
|
||||
([key, val]) => {
|
||||
return {
|
||||
label: val.name,
|
||||
value: key,
|
||||
};
|
||||
},
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div
|
||||
key="name"
|
||||
className="flex flex-col items-start space-y-2"
|
||||
>
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
Name*
|
||||
</label>
|
||||
<input
|
||||
value={name}
|
||||
onChange={(e) => setName(e.target.value)}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder={'Provider Name'}
|
||||
type="text"
|
||||
required={true}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{selectedProviderFields.map((field: UIConfigField) => (
|
||||
<div
|
||||
key={field.key}
|
||||
className="flex flex-col items-start space-y-2"
|
||||
>
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
{field.name}
|
||||
{field.required && '*'}
|
||||
</label>
|
||||
<input
|
||||
value={config[field.key] ?? field.default ?? ''}
|
||||
onChange={(event) =>
|
||||
setConfig((prev) => ({
|
||||
...prev,
|
||||
[field.key]: event.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder={
|
||||
(field as StringUIConfigField).placeholder
|
||||
}
|
||||
type="text"
|
||||
required={field.required}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="px-6 py-4 flex justify-end">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={loading}
|
||||
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
|
||||
>
|
||||
{loading ? (
|
||||
<Loader2 className="animate-spin" size={16} />
|
||||
) : (
|
||||
'Add Provider'
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</DialogPanel>
|
||||
</motion.div>
|
||||
</Dialog>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default AddProvider;
|
@@ -1,118 +0,0 @@
|
||||
import { Dialog, DialogPanel } from '@headlessui/react';
|
||||
import { Loader2, Trash2 } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { AnimatePresence, motion } from 'framer-motion';
|
||||
import { ConfigModelProvider } from '@/lib/config/types';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
const DeleteProvider = ({
|
||||
modelProvider,
|
||||
setProviders,
|
||||
}: {
|
||||
modelProvider: ConfigModelProvider;
|
||||
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const handleDelete = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch(`/api/providers/${modelProvider.id}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error('Failed to delete provider');
|
||||
}
|
||||
|
||||
setProviders((prev) => {
|
||||
return prev.filter((p) => p.id !== modelProvider.id);
|
||||
});
|
||||
|
||||
toast.success('Provider deleted successfully.');
|
||||
} catch (error) {
|
||||
console.error('Error deleting provider:', error);
|
||||
toast.error('Failed to delete provider.');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
setOpen(true);
|
||||
}}
|
||||
className="group p-1.5 rounded-md hover:bg-light-200 hover:dark:bg-dark-200 transition-colors group"
|
||||
title="Delete provider"
|
||||
>
|
||||
<Trash2
|
||||
size={14}
|
||||
className="text-black/60 dark:text-white/60 group-hover:text-red-500 group-hover:dark:text-red-400"
|
||||
/>
|
||||
</button>
|
||||
<AnimatePresence>
|
||||
{open && (
|
||||
<Dialog
|
||||
static
|
||||
open={open}
|
||||
onClose={() => setOpen(false)}
|
||||
className="relative z-[60]"
|
||||
>
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm"
|
||||
>
|
||||
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
|
||||
<div className="px-6 pt-6 pb-4">
|
||||
<h3 className="text-black/90 dark:text-white/90 font-medium">
|
||||
Delete provider
|
||||
</h3>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="flex-1 overflow-y-auto px-6 py-4">
|
||||
<p className="text-SM text-black/60 dark:text-white/60">
|
||||
Are you sure you want to delete the provider "
|
||||
{modelProvider.name}"? This action cannot be undone.
|
||||
</p>
|
||||
</div>
|
||||
<div className="px-6 py-6 flex justify-end space-x-2">
|
||||
<button
|
||||
disabled={loading}
|
||||
onClick={() => setOpen(false)}
|
||||
className="px-4 py-2 rounded-lg text-sm border border-light-200 dark:border-dark-200 text-black dark:text-white bg-light-secondary/50 dark:bg-dark-secondary/50 hover:bg-light-secondary hover:dark:bg-dark-secondary hover:border-light-300 hover:dark:border-dark-300 flex flex-row items-center space-x-1 active:scale-95 transition duration-200"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
disabled={loading}
|
||||
onClick={handleDelete}
|
||||
className="px-4 py-2 rounded-lg text-sm bg-red-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
|
||||
>
|
||||
{loading ? (
|
||||
<Loader2 className="animate-spin" size={16} />
|
||||
) : (
|
||||
'Delete'
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</DialogPanel>
|
||||
</motion.div>
|
||||
</Dialog>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default DeleteProvider;
|
@@ -1,217 +0,0 @@
|
||||
import { UIConfigField, ConfigModelProvider } from '@/lib/config/types';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { AnimatePresence, motion } from 'framer-motion';
|
||||
import { AlertCircle, ChevronDown, Pencil, Trash2, X } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
import AddModel from './AddModelDialog';
|
||||
import UpdateProvider from './UpdateProviderDialog';
|
||||
import DeleteProvider from './DeleteProviderDialog';
|
||||
|
||||
const ModelProvider = ({
|
||||
modelProvider,
|
||||
setProviders,
|
||||
fields,
|
||||
}: {
|
||||
modelProvider: ConfigModelProvider;
|
||||
fields: UIConfigField[];
|
||||
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
const handleModelDelete = async (
|
||||
type: 'chat' | 'embedding',
|
||||
modelKey: string,
|
||||
) => {
|
||||
try {
|
||||
const res = await fetch(`/api/providers/${modelProvider.id}/models`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ key: modelKey, type: type }),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error('Failed to delete model: ' + (await res.text()));
|
||||
}
|
||||
|
||||
setProviders(
|
||||
(prev) =>
|
||||
prev.map((provider) => {
|
||||
if (provider.id === modelProvider.id) {
|
||||
return {
|
||||
...provider,
|
||||
...(type === 'chat'
|
||||
? {
|
||||
chatModels: provider.chatModels.filter(
|
||||
(m) => m.key !== modelKey,
|
||||
),
|
||||
}
|
||||
: {
|
||||
embeddingModels: provider.embeddingModels.filter(
|
||||
(m) => m.key !== modelKey,
|
||||
),
|
||||
}),
|
||||
};
|
||||
}
|
||||
return provider;
|
||||
}) as ConfigModelProvider[],
|
||||
);
|
||||
|
||||
toast.success('Model deleted successfully.');
|
||||
} catch (err) {
|
||||
console.error('Failed to delete model', err);
|
||||
toast.error('Failed to delete model.');
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
key={modelProvider.id}
|
||||
className="border border-light-200 dark:border-dark-200 rounded-lg overflow-hidden"
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'group px-5 py-4 flex flex-row justify-between w-full cursor-pointer hover:bg-light-secondary hover:dark:bg-dark-secondary transition duration-200 items-center',
|
||||
!open && 'rounded-lg',
|
||||
)}
|
||||
onClick={() => setOpen(!open)}
|
||||
>
|
||||
<p className="text-black dark:text-white font-medium">
|
||||
{modelProvider.name}
|
||||
</p>
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex flex-row items-center">
|
||||
<UpdateProvider
|
||||
fields={fields}
|
||||
modelProvider={modelProvider}
|
||||
setProviders={setProviders}
|
||||
/>
|
||||
<DeleteProvider
|
||||
modelProvider={modelProvider}
|
||||
setProviders={setProviders}
|
||||
/>
|
||||
</div>
|
||||
<ChevronDown
|
||||
size={16}
|
||||
className={cn(
|
||||
open ? 'rotate-180' : '',
|
||||
'transition duration-200 text-black/70 dark:text-white/70 group-hover:text-sky-500',
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<AnimatePresence>
|
||||
{open && (
|
||||
<motion.div
|
||||
initial={{ height: 0, opacity: 0 }}
|
||||
animate={{ height: 'auto', opacity: 1 }}
|
||||
exit={{ height: 0, opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="flex flex-col gap-y-4 px-5 py-4">
|
||||
{modelProvider.chatModels.length > 0 && (
|
||||
<div className="flex flex-col gap-y-2">
|
||||
<div className="flex flex-row w-full justify-between items-center">
|
||||
<p className="text-xs text-black/70 dark:text-white/70">
|
||||
Chat models
|
||||
</p>
|
||||
<AddModel
|
||||
providerId={modelProvider.id}
|
||||
setProviders={setProviders}
|
||||
type="chat"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col gap-2">
|
||||
{modelProvider.chatModels.some((m) => m.key === 'error') ? (
|
||||
<div className="flex flex-row items-center gap-2 text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
|
||||
<AlertCircle size={16} className="shrink-0" />
|
||||
<span className="break-words">
|
||||
{
|
||||
modelProvider.chatModels.find(
|
||||
(m) => m.key === 'error',
|
||||
)?.name
|
||||
}
|
||||
</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-row flex-wrap gap-2">
|
||||
{modelProvider.chatModels.map((model, index) => (
|
||||
<div
|
||||
key={`${modelProvider.id}-chat-${model.key}-${index}`}
|
||||
className="flex flex-row items-center space-x-1 text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
|
||||
>
|
||||
<span>{model.name}</span>
|
||||
<button
|
||||
onClick={() => {
|
||||
handleModelDelete('chat', model.key);
|
||||
}}
|
||||
>
|
||||
<X size={12} />
|
||||
</button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{modelProvider.embeddingModels.length > 0 && (
|
||||
<div className="flex flex-col gap-y-2">
|
||||
<div className="flex flex-row w-full justify-between items-center">
|
||||
<p className="text-xs text-black/70 dark:text-white/70">
|
||||
Embedding models
|
||||
</p>
|
||||
<AddModel
|
||||
providerId={modelProvider.id}
|
||||
setProviders={setProviders}
|
||||
type="embedding"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col gap-2">
|
||||
{modelProvider.embeddingModels.some(
|
||||
(m) => m.key === 'error',
|
||||
) ? (
|
||||
<div className="flex flex-row items-center gap-2 text-sm text-red-500 dark:text-red-400 rounded-lg bg-red-50 dark:bg-red-950/20 px-3 py-2 border border-red-200 dark:border-red-900/30">
|
||||
<AlertCircle size={16} className="shrink-0" />
|
||||
<span className="break-words">
|
||||
{
|
||||
modelProvider.embeddingModels.find(
|
||||
(m) => m.key === 'error',
|
||||
)?.name
|
||||
}
|
||||
</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-row flex-wrap gap-2">
|
||||
{modelProvider.embeddingModels.map((model, index) => (
|
||||
<div
|
||||
key={`${modelProvider.id}-embedding-${model.key}-${index}`}
|
||||
className="flex flex-row items-center space-x-1 text-sm text-black/70 dark:text-white/70 rounded-lg bg-light-secondary dark:bg-dark-secondary px-3 py-1.5"
|
||||
>
|
||||
<span>{model.name}</span>
|
||||
<button
|
||||
onClick={() => {
|
||||
handleModelDelete('embedding', model.key);
|
||||
}}
|
||||
>
|
||||
<X size={12} />
|
||||
</button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</motion.div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default ModelProvider;
|
@@ -1,44 +0,0 @@
import React, { useState } from 'react';
import AddProvider from './AddProviderDialog';
import {
  ConfigModelProvider,
  ModelProviderUISection,
  UIConfigField,
} from '@/lib/config/types';
import ModelProvider from './ModelProvider';

const Models = ({
  fields,
  values,
}: {
  fields: ModelProviderUISection[];
  values: ConfigModelProvider[];
}) => {
  const [providers, setProviders] = useState<ConfigModelProvider[]>(values);

  return (
    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
      <div className="flex flex-row justify-between items-center">
        <p className="text-sm text-black/70 dark:text-white/70">
          Manage model provider
        </p>
        <AddProvider modelProviders={fields} setProviders={setProviders} />
      </div>
      <div className="flex flex-col gap-y-4">
        {providers.map((provider) => (
          <ModelProvider
            key={`provider-${provider.id}`}
            fields={
              (fields.find((f) => f.key === provider.type)?.fields ??
                []) as UIConfigField[]
            }
            modelProvider={provider}
            setProviders={setProviders}
          />
        ))}
      </div>
    </div>
  );
};

export default Models;
|
@@ -1,188 +0,0 @@
|
||||
import { Dialog, DialogPanel } from '@headlessui/react';
|
||||
import { Loader2, Pencil } from 'lucide-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { AnimatePresence, motion } from 'framer-motion';
|
||||
import {
|
||||
ConfigModelProvider,
|
||||
StringUIConfigField,
|
||||
UIConfigField,
|
||||
} from '@/lib/config/types';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
const UpdateProvider = ({
|
||||
modelProvider,
|
||||
fields,
|
||||
setProviders,
|
||||
}: {
|
||||
fields: UIConfigField[];
|
||||
modelProvider: ConfigModelProvider;
|
||||
setProviders: React.Dispatch<React.SetStateAction<ConfigModelProvider[]>>;
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [config, setConfig] = useState<Record<string, any>>({});
|
||||
const [name, setName] = useState(modelProvider.name);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const config: Record<string, any> = {
|
||||
name: modelProvider.name,
|
||||
};
|
||||
|
||||
fields.forEach((field) => {
|
||||
config[field.key] =
|
||||
modelProvider.config[field.key] || field.default || '';
|
||||
});
|
||||
|
||||
setConfig(config);
|
||||
}, [fields]);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch(`/api/providers/${modelProvider.id}`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: name,
|
||||
config: config,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error('Failed to update provider');
|
||||
}
|
||||
|
||||
const data: ConfigModelProvider = (await res.json()).provider;
|
||||
|
||||
setProviders((prev) => {
|
||||
return prev.map((p) => {
|
||||
if (p.id === modelProvider.id) {
|
||||
return data;
|
||||
}
|
||||
|
||||
return p;
|
||||
});
|
||||
});
|
||||
|
||||
toast.success('Provider updated successfully.');
|
||||
} catch (error) {
|
||||
console.error('Error updating provider:', error);
|
||||
toast.error('Failed to update provider.');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
setOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
setOpen(true);
|
||||
}}
|
||||
className="group p-1.5 rounded-md hover:bg-light-200 hover:dark:bg-dark-200 transition-colors group"
|
||||
>
|
||||
<Pencil
|
||||
size={14}
|
||||
className="text-black/60 dark:text-white/60 group-hover:text-black group-hover:dark:text-white"
|
||||
/>
|
||||
</button>
|
||||
<AnimatePresence>
|
||||
{open && (
|
||||
<Dialog
|
||||
static
|
||||
open={open}
|
||||
onClose={() => setOpen(false)}
|
||||
className="relative z-[60]"
|
||||
>
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm"
|
||||
>
|
||||
<DialogPanel className="w-full mx-4 lg:w-[600px] max-h-[85vh] flex flex-col border bg-light-primary dark:bg-dark-primary border-light-secondary dark:border-dark-secondary rounded-lg">
|
||||
<form onSubmit={handleSubmit} className="flex flex-col flex-1">
|
||||
<div className="px-6 pt-6 pb-4">
|
||||
<h3 className="text-black/90 dark:text-white/90 font-medium">
|
||||
Update provider
|
||||
</h3>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="flex-1 overflow-y-auto px-6 py-4">
|
||||
<div className="flex flex-col space-y-4">
|
||||
<div
|
||||
key="name"
|
||||
className="flex flex-col items-start space-y-2"
|
||||
>
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
Name*
|
||||
</label>
|
||||
<input
|
||||
value={name}
|
||||
onChange={(event) => setName(event.target.value)}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder={'Provider Name'}
|
||||
type="text"
|
||||
required={true}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{fields.map((field: UIConfigField) => (
|
||||
<div
|
||||
key={field.key}
|
||||
className="flex flex-col items-start space-y-2"
|
||||
>
|
||||
<label className="text-xs text-black/70 dark:text-white/70">
|
||||
{field.name}
|
||||
{field.required && '*'}
|
||||
</label>
|
||||
<input
|
||||
value={config[field.key] ?? field.default ?? ''}
|
||||
onChange={(event) =>
|
||||
setConfig((prev) => ({
|
||||
...prev,
|
||||
[field.key]: event.target.value,
|
||||
}))
|
||||
}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder={
|
||||
(field as StringUIConfigField).placeholder
|
||||
}
|
||||
type="text"
|
||||
required={field.required}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="border-t border-light-200 dark:border-dark-200" />
|
||||
<div className="px-6 py-4 flex justify-end">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={loading}
|
||||
className="px-4 py-2 rounded-lg text-sm bg-sky-500 text-white font-medium disabled:opacity-85 hover:opacity-85 active:scale-95 transition duration-200"
|
||||
>
|
||||
{loading ? (
|
||||
<Loader2 className="animate-spin" size={16} />
|
||||
) : (
|
||||
'Update Provider'
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</DialogPanel>
|
||||
</motion.div>
|
||||
</Dialog>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default UpdateProvider;
|
@@ -1,29 +0,0 @@
import { UIConfigField } from '@/lib/config/types';
import SettingsField from '../SettingsField';

const Search = ({
  fields,
  values,
}: {
  fields: UIConfigField[];
  values: Record<string, any>;
}) => {
  return (
    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
      {fields.map((field) => (
        <SettingsField
          key={field.key}
          field={field}
          value={
            (field.scope === 'client'
              ? localStorage.getItem(field.key)
              : values[field.key]) ?? field.default
          }
          dataAdd="search"
        />
      ))}
    </div>
  );
};

export default Search;
|
@@ -1,24 +0,0 @@
import { Settings } from 'lucide-react';
import { useState } from 'react';
import SettingsDialogue from './SettingsDialogue';
import { AnimatePresence } from 'framer-motion';

const SettingsButton = () => {
  const [isOpen, setIsOpen] = useState<boolean>(false);

  return (
    <>
      <div
        className="p-2.5 rounded-full bg-light-200 text-black/70 dark:bg-dark-200 dark:text-white/70 hover:opacity-70 hover:scale-105 transition duration-200 cursor-pointer active:scale-95"
        onClick={() => setIsOpen(true)}
      >
        <Settings size={19} className="cursor-pointer" />
      </div>
      <AnimatePresence>
        {isOpen && <SettingsDialogue isOpen={isOpen} setIsOpen={setIsOpen} />}
      </AnimatePresence>
    </>
  );
};

export default SettingsButton;
|
@@ -1,152 +0,0 @@
|
||||
import { Dialog, DialogPanel } from '@headlessui/react';
|
||||
import { BrainCog, ChevronLeft, Search, Settings } from 'lucide-react';
|
||||
import General from './Sections/General';
|
||||
import { motion } from 'framer-motion';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
import Loader from '../ui/Loader';
|
||||
import { cn } from '@/lib/utils';
|
||||
import Models from './Sections/Models/Section';
|
||||
import SearchSection from './Sections/Search';
|
||||
|
||||
const sections = [
|
||||
{
|
||||
name: 'General',
|
||||
description: 'Adjust common settings.',
|
||||
icon: Settings,
|
||||
component: General,
|
||||
dataAdd: 'general',
|
||||
},
|
||||
{
|
||||
name: 'Models',
|
||||
description: 'Configure model settings.',
|
||||
icon: BrainCog,
|
||||
component: Models,
|
||||
dataAdd: 'modelProviders',
|
||||
},
|
||||
{
|
||||
name: 'Search',
|
||||
description: 'Manage search settings.',
|
||||
icon: Search,
|
||||
component: SearchSection,
|
||||
dataAdd: 'search',
|
||||
},
|
||||
];
|
||||
|
||||
const SettingsDialogue = ({
|
||||
isOpen,
|
||||
setIsOpen,
|
||||
}: {
|
||||
isOpen: boolean;
|
||||
setIsOpen: (active: boolean) => void;
|
||||
}) => {
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [config, setConfig] = useState<any>(null);
|
||||
const [activeSection, setActiveSection] = useState(sections[0]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
const fetchConfig = async () => {
|
||||
try {
|
||||
const res = await fetch('/api/config', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
|
||||
setConfig(data);
|
||||
} catch (error) {
|
||||
console.error('Error fetching config:', error);
|
||||
toast.error('Failed to load configuration.');
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
fetchConfig();
|
||||
}
|
||||
}, [isOpen]);
|
||||
|
||||
return (
|
||||
<Dialog
|
||||
open={isOpen}
|
||||
onClose={() => setIsOpen(false)}
|
||||
className="relative z-50"
|
||||
>
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{ duration: 0.1 }}
|
||||
className="fixed inset-0 flex w-screen items-center justify-center p-4 bg-black/30 backdrop-blur-sm h-screen"
|
||||
>
|
||||
<DialogPanel className="space-y-4 border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary backdrop-blur-lg rounded-xl h-[calc(100vh-2%)] w-[calc(100vw-2%)] md:h-[calc(100vh-7%)] md:w-[calc(100vw-7%)] lg:h-[calc(100vh-20%)] lg:w-[calc(100vw-30%)]">
|
||||
{isLoading ? (
|
||||
<div className="flex items-center justify-center h-full w-full">
|
||||
<Loader />
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-1 inset-0 h-full">
|
||||
<div className="w-[240px] border-r border-white-200 dark:border-dark-200 h-full px-3 pt-3 flex flex-col">
|
||||
<button
|
||||
onClick={() => setIsOpen(false)}
|
||||
className="group flex flex-row items-center hover:bg-light-200 hover:dark:bg-dark-200 p-2 rounded-lg"
|
||||
>
|
||||
<ChevronLeft
|
||||
size={18}
|
||||
className="text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70"
|
||||
/>
|
||||
<p className="text-black/50 dark:text-white/50 group-hover:text-black/70 group-hover:dark:text-white/70 text-[14px]">
|
||||
Back
|
||||
</p>
|
||||
</button>
|
||||
<div className="flex flex-col items-start space-y-1 mt-8">
|
||||
{sections.map((section) => (
|
||||
<button
|
||||
key={section.dataAdd}
|
||||
className={cn(
|
||||
`flex flex-row items-center space-x-2 px-2 py-1.5 rounded-lg w-full text-sm hover:bg-light-200 hover:dark:bg-dark-200 transition duration-200 active:scale-95`,
|
||||
activeSection.name === section.name
|
||||
? 'bg-light-200 dark:bg-dark-200 text-black/90 dark:text-white/90'
|
||||
: ' text-black/70 dark:text-white/70',
|
||||
)}
|
||||
onClick={() => setActiveSection(section)}
|
||||
>
|
||||
<section.icon size={17} />
|
||||
<p>{section.name}</p>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="w-full">
|
||||
{activeSection.component && (
|
||||
<div className="flex h-full flex-col">
|
||||
<div className="border-b border-light-200/60 px-6 pb-6 pt-8 dark:border-dark-200/60">
|
||||
<div className="flex flex-col">
|
||||
<h4 className="font-medium text-black dark:text-white">
|
||||
{activeSection.name}
|
||||
</h4>
|
||||
<p className="text-xs text-black/50 dark:text-white/50">
|
||||
{activeSection.description}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<activeSection.component
|
||||
fields={config.fields[activeSection.dataAdd]}
|
||||
values={config.values[activeSection.dataAdd]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</DialogPanel>
|
||||
</motion.div>
|
||||
</Dialog>
|
||||
);
|
||||
};
|
||||
|
||||
export default SettingsDialogue;
|
@@ -1,194 +0,0 @@
|
||||
import {
|
||||
SelectUIConfigField,
|
||||
StringUIConfigField,
|
||||
UIConfigField,
|
||||
} from '@/lib/config/types';
|
||||
import { useState } from 'react';
|
||||
import Select from '../ui/Select';
|
||||
import { toast } from 'sonner';
|
||||
import { useTheme } from 'next-themes';
|
||||
import { Loader2 } from 'lucide-react';
|
||||
|
||||
const SettingsSelect = ({
|
||||
field,
|
||||
value,
|
||||
setValue,
|
||||
dataAdd,
|
||||
}: {
|
||||
field: SelectUIConfigField;
|
||||
value?: any;
|
||||
setValue: (value: any) => void;
|
||||
dataAdd: string;
|
||||
}) => {
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { setTheme } = useTheme();
|
||||
|
||||
const handleSave = async (newValue: any) => {
|
||||
setLoading(true);
|
||||
setValue(newValue);
|
||||
try {
|
||||
if (field.scope === 'client') {
|
||||
localStorage.setItem(field.key, newValue);
|
||||
if (field.key === 'theme') {
|
||||
setTheme(newValue);
|
||||
}
|
||||
} else {
|
||||
const res = await fetch('/api/config', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
key: `${dataAdd}.${field.key}`,
|
||||
value: newValue,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
console.error('Failed to save config:', await res.text());
|
||||
throw new Error('Failed to save configuration');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error saving config:', error);
|
||||
toast.error('Failed to save configuration.');
|
||||
} finally {
|
||||
setTimeout(() => setLoading(false), 150);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
|
||||
<div className="space-y-5">
|
||||
<div>
|
||||
<h4 className="text-base text-black dark:text-white">{field.name}</h4>
|
||||
<p className="text-xs text-black/50 dark:text-white/50">
|
||||
{field.description}
|
||||
</p>
|
||||
</div>
|
||||
<Select
|
||||
value={value}
|
||||
onChange={(event) => handleSave(event.target.value)}
|
||||
options={field.options.map((option) => ({
|
||||
value: option.value,
|
||||
label: option.name,
|
||||
}))}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60 cursor-pointer capitalize pr-12"
|
||||
loading={loading}
|
||||
disabled={loading}
|
||||
/>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
};
|
||||
|
||||
const SettingsInput = ({
|
||||
field,
|
||||
value,
|
||||
setValue,
|
||||
dataAdd,
|
||||
}: {
|
||||
field: StringUIConfigField;
|
||||
value?: any;
|
||||
setValue: (value: any) => void;
|
||||
dataAdd: string;
|
||||
}) => {
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const handleSave = async (newValue: any) => {
|
||||
setLoading(true);
|
||||
setValue(newValue);
|
||||
try {
|
||||
if (field.scope === 'client') {
|
||||
localStorage.setItem(field.key, newValue);
|
||||
} else {
|
||||
const res = await fetch('/api/config', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
key: `${dataAdd}.${field.key}`,
|
||||
value: newValue,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
console.error('Failed to save config:', await res.text());
|
||||
throw new Error('Failed to save configuration');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error saving config:', error);
|
||||
toast.error('Failed to save configuration.');
|
||||
} finally {
|
||||
setTimeout(() => setLoading(false), 150);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
|
||||
<div className="space-y-5">
|
||||
<div>
|
||||
<h4 className="text-base text-black dark:text-white">{field.name}</h4>
|
||||
<p className="text-xs text-black/50 dark:text-white/50">
|
||||
{field.description}
|
||||
</p>
|
||||
</div>
|
||||
<div className="relative">
|
||||
<input
|
||||
value={value ?? field.default ?? ''}
|
||||
onChange={(event) => setValue(event.target.value)}
|
||||
onBlur={(event) => handleSave(event.target.value)}
|
||||
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 pr-10 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60"
|
||||
placeholder={field.placeholder}
|
||||
type="text"
|
||||
disabled={loading}
|
||||
/>
|
||||
{loading && (
|
||||
<span className="pointer-events-none absolute right-3 top-1/2 -translate-y-1/2 text-black/40 dark:text-white/40">
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
};
|
||||
|
||||
const SettingsField = ({
|
||||
field,
|
||||
value,
|
||||
dataAdd,
|
||||
}: {
|
||||
field: UIConfigField;
|
||||
value: any;
|
||||
dataAdd: string;
|
||||
}) => {
|
||||
const [val, setVal] = useState(value);
|
||||
|
||||
switch (field.type) {
|
||||
case 'select':
|
||||
return (
|
||||
<SettingsSelect
|
||||
field={field}
|
||||
value={val}
|
||||
setValue={setVal}
|
||||
dataAdd={dataAdd}
|
||||
/>
|
||||
);
|
||||
case 'string':
|
||||
return (
|
||||
<SettingsInput
|
||||
field={field}
|
||||
value={val}
|
||||
setValue={setVal}
|
||||
dataAdd={dataAdd}
|
||||
/>
|
||||
);
|
||||
default:
|
||||
return <div>Unsupported field type: {field.type}</div>;
|
||||
}
|
||||
};
|
||||
|
||||
export default SettingsField;
|
@@ -1,34 +1,20 @@
|
||||
'use client';
|
||||
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
BookOpenText,
|
||||
Home,
|
||||
Search,
|
||||
SquarePen,
|
||||
Settings,
|
||||
Plus,
|
||||
ArrowLeft,
|
||||
} from 'lucide-react';
|
||||
import { BookOpenText, Home, Search, SquarePen, Settings } from 'lucide-react';
|
||||
import Link from 'next/link';
|
||||
import { useSelectedLayoutSegments } from 'next/navigation';
|
||||
import React, { useState, type ReactNode } from 'react';
|
||||
import Layout from './Layout';
|
||||
import {
|
||||
Description,
|
||||
Dialog,
|
||||
DialogPanel,
|
||||
DialogTitle,
|
||||
} from '@headlessui/react';
|
||||
import SettingsButton from './Settings/SettingsButton';
|
||||
|
||||
const VerticalIconContainer = ({ children }: { children: ReactNode }) => {
|
||||
return <div className="flex flex-col items-center w-full">{children}</div>;
|
||||
return (
|
||||
<div className="flex flex-col items-center gap-y-3 w-full">{children}</div>
|
||||
);
|
||||
};
|
||||
|
||||
const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
||||
const segments = useSelectedLayoutSegments();
|
||||
const [isOpen, setIsOpen] = useState<boolean>(true);
|
||||
|
||||
const navLinks = [
|
||||
{
|
||||
@@ -53,13 +39,10 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="hidden lg:fixed lg:inset-y-0 lg:z-50 lg:flex lg:w-[72px] lg:flex-col border-r border-light-200 dark:border-dark-200">
|
||||
<div className="flex grow flex-col items-center justify-between gap-y-5 overflow-y-auto bg-light-secondary dark:bg-dark-secondary px-2 py-8 shadow-sm shadow-light-200/10 dark:shadow-black/25">
|
||||
<a
|
||||
className="p-2.5 rounded-full bg-light-200 text-black/70 dark:bg-dark-200 dark:text-white/70 hover:opacity-70 hover:scale-105 tansition duration-200"
|
||||
href="/"
|
||||
>
|
||||
<Plus size={19} className="cursor-pointer" />
|
||||
<div className="hidden lg:fixed lg:inset-y-0 lg:z-50 lg:flex lg:w-20 lg:flex-col">
|
||||
<div className="flex grow flex-col items-center justify-between gap-y-5 overflow-y-auto bg-light-secondary dark:bg-dark-secondary px-2 py-8 mx-2 my-2 rounded-2xl shadow-sm shadow-light-200/10 dark:shadow-black/25">
|
||||
<a href="/">
|
||||
<SquarePen className="cursor-pointer" />
|
||||
</a>
|
||||
<VerticalIconContainer>
|
||||
{navLinks.map((link, i) => (
|
||||
@@ -67,45 +50,27 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
||||
key={i}
|
||||
href={link.href}
|
||||
className={cn(
|
||||
'relative flex flex-col items-center justify-center space-y-0.5 cursor-pointer w-full py-2 rounded-lg',
|
||||
'relative flex flex-row items-center justify-center cursor-pointer hover:bg-black/10 dark:hover:bg-white/10 duration-150 transition w-full py-2 rounded-lg',
|
||||
link.active
|
||||
? 'text-black/70 dark:text-white/70 '
|
||||
: 'text-black/60 dark:text-white/60',
|
||||
? 'text-black dark:text-white'
|
||||
: 'text-black/70 dark:text-white/70',
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
link.active && 'bg-light-200 dark:bg-dark-200',
|
||||
'group rounded-lg hover:bg-light-200 hover:dark:bg-dark-200 transition duration-200',
|
||||
)}
|
||||
>
|
||||
<link.icon
|
||||
size={25}
|
||||
className={cn(
|
||||
!link.active && 'group-hover:scale-105',
|
||||
'transition duration-200 m-1.5',
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<p
|
||||
className={cn(
|
||||
link.active
|
||||
? 'text-black/80 dark:text-white/80'
|
||||
: 'text-black/60 dark:text-white/60',
|
||||
'text-[10px]',
|
||||
)}
|
||||
>
|
||||
{link.label}
|
||||
</p>
|
||||
<link.icon />
|
||||
{link.active && (
|
||||
<div className="absolute right-0 -mr-2 h-full w-1 rounded-l-lg bg-black dark:bg-white" />
|
||||
)}
|
||||
</Link>
|
||||
))}
|
||||
</VerticalIconContainer>
|
||||
|
||||
<SettingsButton />
|
||||
<Link href="/settings">
|
||||
<Settings className="cursor-pointer" />
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-secondary dark:bg-dark-secondary px-4 py-4 shadow-sm lg:hidden">
|
||||
<div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-primary dark:bg-dark-primary px-4 py-4 shadow-sm lg:hidden">
|
||||
{navLinks.map((link, i) => (
|
||||
<Link
|
||||
href={link.href}
|
||||
|
@@ -1,22 +0,0 @@
|
||||
const Loader = () => {
|
||||
return (
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
className="w-8 h-8 text-light-200 fill-light-secondary dark:text-[#202020] animate-spin dark:fill-[#ffffff3b]"
|
||||
viewBox="0 0 100 101"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="M100 50.5908C100.003 78.2051 78.1951 100.003 50.5908 100C22.9765 99.9972 0.997224 78.018 1 50.4037C1.00281 22.7993 22.8108 0.997224 50.4251 1C78.0395 1.00281 100.018 22.8108 100 50.4251ZM9.08164 50.594C9.06312 73.3997 27.7909 92.1272 50.5966 92.1457C73.4023 92.1642 92.1298 73.4365 92.1483 50.6308C92.1669 27.8251 73.4392 9.0973 50.6335 9.07878C27.8278 9.06026 9.10003 27.787 9.08164 50.594Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
<path
|
||||
d="M93.9676 39.0409C96.393 38.4037 97.8624 35.9116 96.9801 33.5533C95.1945 28.8227 92.871 24.3692 90.0681 20.348C85.6237 14.1775 79.4473 9.36872 72.0454 6.45794C64.6435 3.54717 56.3134 2.65431 48.3133 3.89319C45.869 4.27179 44.3768 6.77534 45.014 9.20079C45.6512 11.6262 48.1343 13.0956 50.5786 12.717C56.5073 11.8281 62.5542 12.5399 68.0406 14.7911C73.527 17.0422 78.2187 20.7487 81.5841 25.4923C83.7976 28.5886 85.4467 32.059 86.4416 35.7474C87.1273 38.1189 89.5423 39.6781 91.9676 39.0409Z"
|
||||
fill="currentFill"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
|
||||
export default Loader;
|
@@ -1,50 +1,28 @@
|
||||
import { cn } from '@/lib/utils';
|
||||
import { Loader2, ChevronDown } from 'lucide-react';
|
||||
import { SelectHTMLAttributes, forwardRef } from 'react';
|
||||
import { SelectHTMLAttributes } from 'react';
|
||||
|
||||
interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
|
||||
options: { value: string; label: string; disabled?: boolean }[];
|
||||
loading?: boolean;
|
||||
}
|
||||
|
||||
export const Select = forwardRef<HTMLSelectElement, SelectProps>(
|
||||
({ className, options, loading = false, disabled, ...restProps }, ref) => {
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'relative inline-flex w-full items-center',
|
||||
disabled && 'opacity-60',
|
||||
)}
|
||||
>
|
||||
<select
|
||||
{...restProps}
|
||||
ref={ref}
|
||||
disabled={disabled || loading}
|
||||
className={cn(
|
||||
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm appearance-none w-full pr-10',
|
||||
className,
|
||||
)}
|
||||
>
|
||||
{options.map(({ label, value, disabled: optionDisabled }) => {
|
||||
return (
|
||||
<option key={value} value={value} disabled={optionDisabled}>
|
||||
{label}
|
||||
</option>
|
||||
);
|
||||
})}
|
||||
</select>
|
||||
<span className="pointer-events-none absolute right-3 flex h-4 w-4 items-center justify-center text-black/50 dark:text-white/60">
|
||||
{loading ? (
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
) : (
|
||||
<ChevronDown className="h-4 w-4" />
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
Select.displayName = 'Select';
|
||||
export const Select = ({ className, options, ...restProps }: SelectProps) => {
|
||||
return (
|
||||
<select
|
||||
{...restProps}
|
||||
className={cn(
|
||||
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
|
||||
className,
|
||||
)}
|
||||
>
|
||||
{options.map(({ label, value, disabled }) => {
|
||||
return (
|
||||
<option key={value} value={value} disabled={disabled}>
|
||||
{label}
|
||||
</option>
|
||||
);
|
||||
})}
|
||||
</select>
|
||||
);
|
||||
};
|
||||
|
||||
export default Select;
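A short usage sketch of the simplified Select above; the option values, handler, and import path are placeholders, not part of this diff:

import Select from '@/components/ui/Select'; // path assumed

const ThemeSelect = () => (
  <Select
    value="dark"
    onChange={(e) => console.log(e.target.value)}
    options={[
      { value: 'light', label: 'Light' },
      { value: 'dark', label: 'Dark' },
    ]}
  />
);

export default ThemeSelect;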
|
||||
|
@@ -1,13 +0,0 @@
|
||||
export const register = async () => {
|
||||
if (process.env.NEXT_RUNTIME === 'nodejs') {
|
||||
try {
|
||||
console.log('Running database migrations...');
|
||||
await import('./lib/db/migrate');
|
||||
console.log('Database migrations completed successfully');
|
||||
} catch (error) {
|
||||
console.error('Failed to run database migrations:', error);
|
||||
}
|
||||
|
||||
await import('./lib/config/index');
|
||||
}
|
||||
};
|
@@ -1,8 +1,11 @@
|
||||
import { Message } from '@/components/ChatWindow';
|
||||
|
||||
export const getSuggestions = async (chatHistory: Message[]) => {
|
||||
const chatModel = localStorage.getItem('chatModelKey');
|
||||
const chatModelProvider = localStorage.getItem('chatModelProviderId');
|
||||
const chatModel = localStorage.getItem('chatModel');
|
||||
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||
|
||||
const customOpenAIKey = localStorage.getItem('openAIApiKey');
|
||||
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
|
||||
|
||||
const res = await fetch(`/api/suggestions`, {
|
||||
method: 'POST',
|
||||
@@ -12,8 +15,12 @@ export const getSuggestions = async (chatHistory: Message[]) => {
|
||||
body: JSON.stringify({
|
||||
chatHistory: chatHistory,
|
||||
chatModel: {
|
||||
providerId: chatModelProvider,
|
||||
key: chatModel,
|
||||
provider: chatModelProvider,
|
||||
model: chatModel,
|
||||
...(chatModelProvider === 'custom_openai' && {
|
||||
customOpenAIKey,
|
||||
customOpenAIBaseURL,
|
||||
}),
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
158
src/lib/config.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import toml from '@iarna/toml';
|
||||
|
||||
// Use dynamic imports for Node.js modules to prevent client-side errors
|
||||
let fs: any;
|
||||
let path: any;
|
||||
if (typeof window === 'undefined') {
|
||||
// We're on the server
|
||||
fs = require('fs');
|
||||
path = require('path');
|
||||
}
|
||||
|
||||
const configFileName = 'config.toml';
|
||||
|
||||
interface Config {
|
||||
GENERAL: {
|
||||
SIMILARITY_MEASURE: string;
|
||||
KEEP_ALIVE: string;
|
||||
};
|
||||
MODELS: {
|
||||
OPENAI: {
|
||||
API_KEY: string;
|
||||
};
|
||||
GROQ: {
|
||||
API_KEY: string;
|
||||
};
|
||||
ANTHROPIC: {
|
||||
API_KEY: string;
|
||||
};
|
||||
GEMINI: {
|
||||
API_KEY: string;
|
||||
};
|
||||
OLLAMA: {
|
||||
API_URL: string;
|
||||
API_KEY: string;
|
||||
};
|
||||
DEEPSEEK: {
|
||||
API_KEY: string;
|
||||
};
|
||||
AIMLAPI: {
|
||||
API_KEY: string;
|
||||
};
|
||||
LM_STUDIO: {
|
||||
API_URL: string;
|
||||
};
|
||||
LEMONADE: {
|
||||
API_URL: string;
|
||||
API_KEY: string;
|
||||
};
|
||||
CUSTOM_OPENAI: {
|
||||
API_URL: string;
|
||||
API_KEY: string;
|
||||
MODEL_NAME: string;
|
||||
};
|
||||
};
|
||||
API_ENDPOINTS: {
|
||||
SEARXNG: string;
|
||||
};
|
||||
}
|
||||
|
||||
type RecursivePartial<T> = {
|
||||
[P in keyof T]?: RecursivePartial<T[P]>;
|
||||
};
|
||||
|
||||
const loadConfig = () => {
|
||||
// Server-side only
|
||||
if (typeof window === 'undefined') {
|
||||
return toml.parse(
|
||||
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
|
||||
) as any as Config;
|
||||
}
|
||||
|
||||
// Client-side fallback - settings will be loaded via API
|
||||
return {} as Config;
|
||||
};
|
||||
|
||||
export const getSimilarityMeasure = () =>
|
||||
loadConfig().GENERAL.SIMILARITY_MEASURE;
|
||||
|
||||
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
||||
|
||||
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
||||
|
||||
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
||||
|
||||
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
|
||||
|
||||
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
|
||||
|
||||
export const getSearxngApiEndpoint = () =>
|
||||
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
||||
|
||||
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
|
||||
|
||||
export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
|
||||
|
||||
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
|
||||
|
||||
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
|
||||
|
||||
export const getCustomOpenaiApiKey = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
|
||||
|
||||
export const getCustomOpenaiApiUrl = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
|
||||
|
||||
export const getCustomOpenaiModelName = () =>
|
||||
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
|
||||
|
||||
export const getLMStudioApiEndpoint = () =>
|
||||
loadConfig().MODELS.LM_STUDIO.API_URL;
|
||||
|
||||
export const getLemonadeApiEndpoint = () =>
|
||||
loadConfig().MODELS.LEMONADE.API_URL;
|
||||
|
||||
export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY;
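Each getter above re-reads and re-parses config.toml on every call, so values picked up at request time always reflect the file on disk. A hedged sketch of how a server-side route handler might consume these helpers (the route shape and request fields are illustrative, not part of this diff):

import { getSearxngApiEndpoint, updateConfig } from '@/lib/config';

export const GET = async () => {
  // Reads API_ENDPOINTS.SEARXNG from config.toml (or the SEARXNG_API_URL env override).
  return Response.json({ searxngURL: getSearxngApiEndpoint() });
};

export const POST = async (req: Request) => {
  const body = await req.json();
  // updateConfig deep-merges the partial object into the existing TOML (see mergeConfigs below).
  updateConfig({ API_ENDPOINTS: { SEARXNG: body.searxngURL } });
  return Response.json({ ok: true });
};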
|
||||
|
||||
const mergeConfigs = (current: any, update: any): any => {
|
||||
if (update === null || update === undefined) {
|
||||
return current;
|
||||
}
|
||||
|
||||
if (typeof current !== 'object' || current === null) {
|
||||
return update;
|
||||
}
|
||||
|
||||
const result = { ...current };
|
||||
|
||||
for (const key in update) {
|
||||
if (Object.prototype.hasOwnProperty.call(update, key)) {
|
||||
const updateValue = update[key];
|
||||
|
||||
if (
|
||||
typeof updateValue === 'object' &&
|
||||
updateValue !== null &&
|
||||
typeof result[key] === 'object' &&
|
||||
result[key] !== null
|
||||
) {
|
||||
result[key] = mergeConfigs(result[key], updateValue);
|
||||
} else if (updateValue !== undefined) {
|
||||
result[key] = updateValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
// Server-side only
|
||||
if (typeof window === 'undefined') {
|
||||
const currentConfig = loadConfig();
|
||||
const mergedConfig = mergeConfigs(currentConfig, config);
|
||||
fs.writeFileSync(
|
||||
path.join(path.join(process.cwd(), `${configFileName}`)),
|
||||
toml.stringify(mergedConfig),
|
||||
);
|
||||
}
|
||||
};
|
@@ -1,16 +0,0 @@
|
||||
'use client';
|
||||
|
||||
const getClientConfig = (key: string, defaultVal?: any) => {
|
||||
return localStorage.getItem(key) ?? defaultVal ?? undefined;
|
||||
};
|
||||
|
||||
export const getTheme = () => getClientConfig('theme', 'dark');
|
||||
|
||||
export const getAutoImageSearch = () =>
|
||||
Boolean(getClientConfig('autoImageSearch', 'true'));
|
||||
|
||||
export const getAutoVideoSearch = () =>
|
||||
Boolean(getClientConfig('autoVideoSearch', 'true'));
|
||||
|
||||
export const getSystemInstructions = () =>
|
||||
getClientConfig('systemInstructions', '');
|
@@ -1,331 +0,0 @@
|
||||
import path from 'node:path';
|
||||
import fs from 'fs';
|
||||
import { Config, ConfigModelProvider, UIConfigSections } from './types';
|
||||
import { hashObj } from '../serverUtils';
|
||||
import { getModelProvidersUIConfigSection } from '../models/providers';
|
||||
|
||||
class ConfigManager {
|
||||
configPath: string = path.join(
|
||||
process.env.DATA_DIR || process.cwd(),
|
||||
'/data/config.json',
|
||||
);
|
||||
configVersion = 1;
|
||||
currentConfig: Config = {
|
||||
version: this.configVersion,
|
||||
setupComplete: false,
|
||||
general: {},
|
||||
modelProviders: [],
|
||||
search: {
|
||||
searxngURL: '',
|
||||
},
|
||||
};
|
||||
uiConfigSections: UIConfigSections = {
|
||||
general: [
|
||||
{
|
||||
name: 'Theme',
|
||||
key: 'theme',
|
||||
type: 'select',
|
||||
options: [
|
||||
{
|
||||
name: 'Light',
|
||||
value: 'light',
|
||||
},
|
||||
{
|
||||
name: 'Dark',
|
||||
value: 'dark',
|
||||
},
|
||||
],
|
||||
required: false,
|
||||
description: 'Choose between light and dark layouts for the app.',
|
||||
default: 'dark',
|
||||
scope: 'client',
|
||||
},
|
||||
],
|
||||
modelProviders: [],
|
||||
search: [
|
||||
{
|
||||
name: 'SearXNG URL',
|
||||
key: 'searxngURL',
|
||||
type: 'string',
|
||||
required: false,
|
||||
description: 'The URL of your SearXNG instance',
|
||||
placeholder: 'http://localhost:4000',
|
||||
default: '',
|
||||
scope: 'server',
|
||||
env: 'SEARXNG_API_URL',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
constructor() {
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
private initialize() {
|
||||
this.initializeConfig();
|
||||
this.initializeFromEnv();
|
||||
}
|
||||
|
||||
private saveConfig() {
|
||||
fs.writeFileSync(
|
||||
this.configPath,
|
||||
JSON.stringify(this.currentConfig, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
private initializeConfig() {
|
||||
const exists = fs.existsSync(this.configPath);
|
||||
if (!exists) {
|
||||
fs.writeFileSync(
|
||||
this.configPath,
|
||||
JSON.stringify(this.currentConfig, null, 2),
|
||||
);
|
||||
} else {
|
||||
try {
|
||||
this.currentConfig = JSON.parse(
|
||||
fs.readFileSync(this.configPath, 'utf-8'),
|
||||
);
|
||||
} catch (err) {
|
||||
if (err instanceof SyntaxError) {
|
||||
console.error(
|
||||
`Error parsing config file at ${this.configPath}:`,
|
||||
err,
|
||||
);
|
||||
console.log(
|
||||
'Loading default config and overwriting the existing file.',
|
||||
);
|
||||
fs.writeFileSync(
|
||||
this.configPath,
|
||||
JSON.stringify(this.currentConfig, null, 2),
|
||||
);
|
||||
return;
|
||||
} else {
|
||||
console.error('Unknown error reading config file:', err);
|
||||
}
|
||||
}
|
||||
|
||||
this.currentConfig = this.migrateConfig(this.currentConfig);
|
||||
}
|
||||
}
|
||||
|
||||
private migrateConfig(config: Config): Config {
|
||||
/* TODO: Add migrations */
|
||||
return config;
|
||||
}
|
||||
|
||||
private initializeFromEnv() {
|
||||
/* providers section*/
|
||||
const providerConfigSections = getModelProvidersUIConfigSection();
|
||||
|
||||
this.uiConfigSections.modelProviders = providerConfigSections;
|
||||
|
||||
const newProviders: ConfigModelProvider[] = [];
|
||||
|
||||
providerConfigSections.forEach((provider) => {
|
||||
const newProvider: ConfigModelProvider & { required?: string[] } = {
|
||||
id: crypto.randomUUID(),
|
||||
name: `${provider.name} ${Math.floor(Math.random() * 1000)}`,
|
||||
type: provider.key,
|
||||
chatModels: [],
|
||||
embeddingModels: [],
|
||||
config: {},
|
||||
required: [],
|
||||
hash: '',
|
||||
};
|
||||
|
||||
provider.fields.forEach((field) => {
|
||||
newProvider.config[field.key] =
|
||||
process.env[field.env!] ||
|
||||
field.default ||
|
||||
''; /* Env var must exist for providers */
|
||||
|
||||
if (field.required) newProvider.required?.push(field.key);
|
||||
});
|
||||
|
||||
let configured = true;
|
||||
|
||||
newProvider.required?.forEach((r) => {
|
||||
if (!newProvider.config[r]) {
|
||||
configured = false;
|
||||
}
|
||||
});
|
||||
|
||||
if (configured) {
|
||||
const hash = hashObj(newProvider.config);
|
||||
newProvider.hash = hash;
|
||||
delete newProvider.required;
|
||||
|
||||
const exists = this.currentConfig.modelProviders.find(
|
||||
(p) => p.hash === hash,
|
||||
);
|
||||
|
||||
if (!exists) {
|
||||
newProviders.push(newProvider);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this.currentConfig.modelProviders.push(...newProviders);
|
||||
|
||||
/* search section */
|
||||
this.uiConfigSections.search.forEach((f) => {
|
||||
if (f.env && !this.currentConfig.search[f.key]) {
|
||||
this.currentConfig.search[f.key] =
|
||||
process.env[f.env] ?? f.default ?? '';
|
||||
}
|
||||
});
|
||||
|
||||
this.saveConfig();
|
||||
}
|
||||
|
||||
public getConfig(key: string, defaultValue?: any): any {
|
||||
const nested = key.split('.');
|
||||
let obj: any = this.currentConfig;
|
||||
|
||||
for (let i = 0; i < nested.length; i++) {
|
||||
const part = nested[i];
|
||||
if (obj == null) return defaultValue;
|
||||
|
||||
obj = obj[part];
|
||||
}
|
||||
|
||||
return obj === undefined ? defaultValue : obj;
|
||||
}
|
||||
|
||||
public updateConfig(key: string, val: any) {
|
||||
const parts = key.split('.');
|
||||
if (parts.length === 0) return;
|
||||
|
||||
let target: any = this.currentConfig;
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (target[part] === null || typeof target[part] !== 'object') {
|
||||
target[part] = {};
|
||||
}
|
||||
|
||||
target = target[part];
|
||||
}
|
||||
|
||||
const finalKey = parts[parts.length - 1];
|
||||
target[finalKey] = val;
|
||||
|
||||
this.saveConfig();
|
||||
}
|
||||
|
||||
public addModelProvider(type: string, name: string, config: any) {
|
||||
const newModelProvider: ConfigModelProvider = {
|
||||
id: crypto.randomUUID(),
|
||||
name,
|
||||
type,
|
||||
config,
|
||||
chatModels: [],
|
||||
embeddingModels: [],
|
||||
hash: hashObj(config),
|
||||
};
|
||||
|
||||
this.currentConfig.modelProviders.push(newModelProvider);
|
||||
this.saveConfig();
|
||||
|
||||
return newModelProvider;
|
||||
}
|
||||
|
||||
public removeModelProvider(id: string) {
|
||||
const index = this.currentConfig.modelProviders.findIndex(
|
||||
(p) => p.id === id,
|
||||
);
|
||||
|
||||
if (index === -1) return;
|
||||
|
||||
this.currentConfig.modelProviders =
|
||||
this.currentConfig.modelProviders.filter((p) => p.id !== id);
|
||||
|
||||
this.saveConfig();
|
||||
}
|
||||
|
||||
public async updateModelProvider(id: string, name: string, config: any) {
|
||||
const provider = this.currentConfig.modelProviders.find((p) => {
|
||||
return p.id === id;
|
||||
});
|
||||
|
||||
if (!provider) throw new Error('Provider not found');
|
||||
|
||||
provider.name = name;
|
||||
provider.config = config;
|
||||
|
||||
this.saveConfig();
|
||||
|
||||
return provider;
|
||||
}
|
||||
|
||||
public addProviderModel(
|
||||
providerId: string,
|
||||
type: 'embedding' | 'chat',
|
||||
model: any,
|
||||
) {
|
||||
const provider = this.currentConfig.modelProviders.find(
|
||||
(p) => p.id === providerId,
|
||||
);
|
||||
|
||||
if (!provider) throw new Error('Invalid provider id');
|
||||
|
||||
delete model.type;
|
||||
|
||||
if (type === 'chat') {
|
||||
provider.chatModels.push(model);
|
||||
} else {
|
||||
provider.embeddingModels.push(model);
|
||||
}
|
||||
|
||||
this.saveConfig();
|
||||
|
||||
return model;
|
||||
}
|
||||
|
||||
public removeProviderModel(
|
||||
providerId: string,
|
||||
type: 'embedding' | 'chat',
|
||||
modelKey: string,
|
||||
) {
|
||||
const provider = this.currentConfig.modelProviders.find(
|
||||
(p) => p.id === providerId,
|
||||
);
|
||||
|
||||
if (!provider) throw new Error('Invalid provider id');
|
||||
|
||||
if (type === 'chat') {
|
||||
provider.chatModels = provider.chatModels.filter(
|
||||
(m) => m.key !== modelKey,
|
||||
);
|
||||
} else {
|
||||
provider.embeddingModels = provider.embeddingModels.filter(
|
||||
(m) => m.key != modelKey,
|
||||
);
|
||||
}
|
||||
|
||||
this.saveConfig();
|
||||
}
|
||||
|
||||
public isSetupComplete() {
|
||||
return this.currentConfig.setupComplete;
|
||||
}
|
||||
|
||||
public markSetupComplete() {
|
||||
if (!this.currentConfig.setupComplete) {
|
||||
this.currentConfig.setupComplete = true;
|
||||
}
|
||||
|
||||
this.saveConfig();
|
||||
}
|
||||
|
||||
public getUIConfigSections(): UIConfigSections {
|
||||
return this.uiConfigSections;
|
||||
}
|
||||
|
||||
public getCurrentConfig(): Config {
|
||||
return JSON.parse(JSON.stringify(this.currentConfig));
|
||||
}
|
||||
}
|
||||
|
||||
const configManager = new ConfigManager();
|
||||
|
||||
export default configManager;
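A small usage sketch of the singleton exported above; the keys shown exist in the default config, while the call site itself is illustrative:

import configManager from '@/lib/config';

// Dot-path reads walk nested objects and fall back to the supplied default.
const searxngURL = configManager.getConfig('search.searxngURL', 'http://localhost:4000');

// Dot-path writes create intermediate objects as needed and persist to data/config.json.
configManager.updateConfig('search.searxngURL', 'http://searxng:8080');

// Providers are managed through dedicated helpers rather than raw key paths.
const provider = configManager.addModelProvider('ollama', 'Local Ollama', {
  baseURL: 'http://localhost:11434',
});
configManager.removeModelProvider(provider.id);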
|
@@ -1,15 +0,0 @@
|
||||
import configManager from './index';
|
||||
import { ConfigModelProvider } from './types';
|
||||
|
||||
export const getConfiguredModelProviders = (): ConfigModelProvider[] => {
|
||||
return configManager.getConfig('modelProviders', []);
|
||||
};
|
||||
|
||||
export const getConfiguredModelProviderById = (
|
||||
id: string,
|
||||
): ConfigModelProvider | undefined => {
|
||||
return getConfiguredModelProviders().find((p) => p.id === id) ?? undefined;
|
||||
};
|
||||
|
||||
export const getSearxngURL = () =>
|
||||
configManager.getConfig('search.searxngURL', '');
|
@@ -1,90 +0,0 @@
|
||||
import { Model } from '../models/types';
|
||||
|
||||
type BaseUIConfigField = {
|
||||
name: string;
|
||||
key: string;
|
||||
required: boolean;
|
||||
description: string;
|
||||
scope: 'client' | 'server';
|
||||
env?: string;
|
||||
};
|
||||
|
||||
type StringUIConfigField = BaseUIConfigField & {
|
||||
type: 'string';
|
||||
placeholder?: string;
|
||||
default?: string;
|
||||
};
|
||||
|
||||
type SelectUIConfigFieldOptions = {
|
||||
name: string;
|
||||
value: string;
|
||||
};
|
||||
|
||||
type SelectUIConfigField = BaseUIConfigField & {
|
||||
type: 'select';
|
||||
default?: string;
|
||||
options: SelectUIConfigFieldOptions[];
|
||||
};
|
||||
|
||||
type PasswordUIConfigField = BaseUIConfigField & {
|
||||
type: 'password';
|
||||
placeholder?: string;
|
||||
default?: string;
|
||||
};
|
||||
|
||||
type UIConfigField =
|
||||
| StringUIConfigField
|
||||
| SelectUIConfigField
|
||||
| PasswordUIConfigField;
|
||||
|
||||
type ConfigModelProvider = {
|
||||
id: string;
|
||||
name: string;
|
||||
type: string;
|
||||
chatModels: Model[];
|
||||
embeddingModels: Model[];
|
||||
config: { [key: string]: any };
|
||||
hash: string;
|
||||
};
|
||||
|
||||
type Config = {
|
||||
version: number;
|
||||
setupComplete: boolean;
|
||||
general: {
|
||||
[key: string]: any;
|
||||
};
|
||||
modelProviders: ConfigModelProvider[];
|
||||
search: {
|
||||
[key: string]: any;
|
||||
};
|
||||
};
|
||||
|
||||
type EnvMap = {
|
||||
[key: string]: {
|
||||
fieldKey: string;
|
||||
providerKey: string;
|
||||
};
|
||||
};
|
||||
|
||||
type ModelProviderUISection = {
|
||||
name: string;
|
||||
key: string;
|
||||
fields: UIConfigField[];
|
||||
};
|
||||
|
||||
type UIConfigSections = {
|
||||
general: UIConfigField[];
|
||||
modelProviders: ModelProviderUISection[];
|
||||
search: UIConfigField[];
|
||||
};
|
||||
|
||||
export type {
|
||||
UIConfigField,
|
||||
Config,
|
||||
EnvMap,
|
||||
UIConfigSections,
|
||||
SelectUIConfigField,
|
||||
StringUIConfigField,
|
||||
ModelProviderUISection,
|
||||
ConfigModelProvider,
|
||||
};
|
@@ -2,12 +2,9 @@ import Database from 'better-sqlite3';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
const DATA_DIR = process.env.DATA_DIR || process.cwd();
|
||||
const dbPath = path.join(DATA_DIR, './data/db.sqlite');
|
||||
const db = new Database(path.join(process.cwd(), 'data', 'db.sqlite'));
|
||||
|
||||
const db = new Database(dbPath);
|
||||
|
||||
const migrationsFolder = path.join(DATA_DIR, 'drizzle');
|
||||
const migrationsFolder = path.join(process.cwd(), 'drizzle');
|
||||
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS ran_migrations (
|
||||
@@ -57,7 +54,7 @@ fs.readdirSync(migrationsFolder)
|
||||
id INTEGER PRIMARY KEY,
|
||||
type TEXT NOT NULL,
|
||||
chatId TEXT NOT NULL,
|
||||
createdAt TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
createdAt TEXT NOT NULL,
|
||||
messageId TEXT NOT NULL,
|
||||
content TEXT,
|
||||
sources TEXT DEFAULT '[]'
|
||||
@@ -70,10 +67,8 @@ fs.readdirSync(migrationsFolder)
|
||||
`);
|
||||
|
||||
messages.forEach((msg: any) => {
|
||||
while (typeof msg.metadata === 'string') {
|
||||
msg.metadata = JSON.parse(msg.metadata || '{}');
|
||||
}
|
||||
if (msg.type === 'user') {
|
||||
msg.metadata = JSON.parse(msg.metadata || '{}');
|
||||
insertMessage.run(
|
||||
'user',
|
||||
msg.chatId,
|
||||
@@ -83,6 +78,7 @@ fs.readdirSync(migrationsFolder)
|
||||
'[]',
|
||||
);
|
||||
} else if (msg.type === 'assistant') {
|
||||
msg.metadata = JSON.parse(msg.metadata || '{}');
|
||||
insertMessage.run(
|
||||
'assistant',
|
||||
msg.chatId,
|
||||
|
@@ -20,7 +20,6 @@ import crypto from 'crypto';
|
||||
import { useSearchParams } from 'next/navigation';
|
||||
import { toast } from 'sonner';
|
||||
import { getSuggestions } from '../actions';
|
||||
import { MinimalProvider } from '../models/types';
|
||||
|
||||
export type Section = {
|
||||
userMessage: UserMessage;
|
||||
@@ -67,13 +66,13 @@ export interface File {
|
||||
}
|
||||
|
||||
interface ChatModelProvider {
|
||||
key: string;
|
||||
providerId: string;
|
||||
name: string;
|
||||
provider: string;
|
||||
}
|
||||
|
||||
interface EmbeddingModelProvider {
|
||||
key: string;
|
||||
providerId: string;
|
||||
name: string;
|
||||
provider: string;
|
||||
}
|
||||
|
||||
const checkConfig = async (
|
||||
@@ -83,12 +82,10 @@ const checkConfig = async (
|
||||
setHasError: (hasError: boolean) => void,
|
||||
) => {
|
||||
try {
|
||||
let chatModelKey = localStorage.getItem('chatModelKey');
|
||||
let chatModelProviderId = localStorage.getItem('chatModelProviderId');
|
||||
let embeddingModelKey = localStorage.getItem('embeddingModelKey');
|
||||
let embeddingModelProviderId = localStorage.getItem(
|
||||
'embeddingModelProviderId',
|
||||
);
|
||||
let chatModel = localStorage.getItem('chatModel');
|
||||
let chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||
let embeddingModel = localStorage.getItem('embeddingModel');
|
||||
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
|
||||
|
||||
const autoImageSearch = localStorage.getItem('autoImageSearch');
|
||||
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
|
||||
@@ -101,81 +98,145 @@ const checkConfig = async (
|
||||
localStorage.setItem('autoVideoSearch', 'false');
|
||||
}
|
||||
|
||||
const res = await fetch(`/api/providers`, {
|
||||
const providers = await fetch(`/api/models`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
}).then(async (res) => {
|
||||
if (!res.ok)
|
||||
throw new Error(
|
||||
`Failed to fetch models: ${res.status} ${res.statusText}`,
|
||||
);
|
||||
return res.json();
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(
|
||||
`Provider fetching failed with status code ${res.status}`,
|
||||
);
|
||||
if (
|
||||
!chatModel ||
|
||||
!chatModelProvider ||
|
||||
!embeddingModel ||
|
||||
!embeddingModelProvider
|
||||
) {
|
||||
if (!chatModel || !chatModelProvider) {
|
||||
const chatModelProviders = providers.chatModelProviders;
|
||||
const chatModelProvidersKeys = Object.keys(chatModelProviders);
|
||||
|
||||
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
|
||||
return toast.error('No chat models available');
|
||||
} else {
|
||||
chatModelProvider =
|
||||
chatModelProvidersKeys.find(
|
||||
(provider) =>
|
||||
Object.keys(chatModelProviders[provider]).length > 0,
|
||||
) || chatModelProvidersKeys[0];
|
||||
}
|
||||
|
||||
if (
|
||||
chatModelProvider === 'custom_openai' &&
|
||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0
|
||||
) {
|
||||
toast.error(
|
||||
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
|
||||
);
|
||||
return setHasError(true);
|
||||
}
|
||||
|
||||
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
|
||||
}
|
||||
|
||||
if (!embeddingModel || !embeddingModelProvider) {
|
||||
const embeddingModelProviders = providers.embeddingModelProviders;
|
||||
|
||||
if (
|
||||
!embeddingModelProviders ||
|
||||
Object.keys(embeddingModelProviders).length === 0
|
||||
)
|
||||
return toast.error('No embedding models available');
|
||||
|
||||
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
|
||||
embeddingModel = Object.keys(
|
||||
embeddingModelProviders[embeddingModelProvider],
|
||||
)[0];
|
||||
}
|
||||
|
||||
localStorage.setItem('chatModel', chatModel!);
|
||||
localStorage.setItem('chatModelProvider', chatModelProvider);
|
||||
localStorage.setItem('embeddingModel', embeddingModel!);
|
||||
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
|
||||
} else {
|
||||
const chatModelProviders = providers.chatModelProviders;
|
||||
const embeddingModelProviders = providers.embeddingModelProviders;
|
||||
|
||||
if (
|
||||
Object.keys(chatModelProviders).length > 0 &&
|
||||
(!chatModelProviders[chatModelProvider] ||
|
||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
|
||||
) {
|
||||
const chatModelProvidersKeys = Object.keys(chatModelProviders);
|
||||
chatModelProvider =
|
||||
chatModelProvidersKeys.find(
|
||||
(key) => Object.keys(chatModelProviders[key]).length > 0,
|
||||
) || chatModelProvidersKeys[0];
|
||||
|
||||
localStorage.setItem('chatModelProvider', chatModelProvider);
|
||||
}
|
||||
|
||||
if (
|
||||
chatModelProvider &&
|
||||
!chatModelProviders[chatModelProvider][chatModel]
|
||||
) {
|
||||
if (
|
||||
chatModelProvider === 'custom_openai' &&
|
||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0
|
||||
) {
|
||||
toast.error(
|
||||
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
|
||||
);
|
||||
return setHasError(true);
|
||||
}
|
||||
|
||||
chatModel = Object.keys(
|
||||
chatModelProviders[
|
||||
Object.keys(chatModelProviders[chatModelProvider]).length > 0
|
||||
? chatModelProvider
|
||||
: Object.keys(chatModelProviders)[0]
|
||||
],
|
||||
)[0];
|
||||
|
||||
localStorage.setItem('chatModel', chatModel);
|
||||
}
|
||||
|
||||
if (
|
||||
Object.keys(embeddingModelProviders).length > 0 &&
|
||||
!embeddingModelProviders[embeddingModelProvider]
|
||||
) {
|
||||
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
|
||||
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
|
||||
}
|
||||
|
||||
if (
|
||||
embeddingModelProvider &&
|
||||
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
|
||||
) {
|
||||
embeddingModel = Object.keys(
|
||||
embeddingModelProviders[embeddingModelProvider],
|
||||
)[0];
|
||||
localStorage.setItem('embeddingModel', embeddingModel);
|
||||
}
|
||||
}
|
||||
|
||||
const data = await res.json();
|
||||
const providers: MinimalProvider[] = data.providers;
|
||||
|
||||
if (providers.length === 0) {
|
||||
throw new Error(
|
||||
'No chat model providers found, please configure them in the settings page.',
|
||||
);
|
||||
}
|
||||
|
||||
const chatModelProvider =
|
||||
providers.find((p) => p.id === chatModelProviderId) ??
|
||||
providers.find((p) => p.chatModels.length > 0);
|
||||
|
||||
if (!chatModelProvider) {
|
||||
throw new Error(
|
||||
'No chat models found, please configure them in the settings page.',
|
||||
);
|
||||
}
|
||||
|
||||
chatModelProviderId = chatModelProvider.id;
|
||||
|
||||
const chatModel =
|
||||
chatModelProvider.chatModels.find((m) => m.key === chatModelKey) ??
|
||||
chatModelProvider.chatModels[0];
|
||||
chatModelKey = chatModel.key;
|
||||
|
||||
const embeddingModelProvider =
|
||||
providers.find((p) => p.id === embeddingModelProviderId) ??
|
||||
providers.find((p) => p.embeddingModels.length > 0);
|
||||
|
||||
if (!embeddingModelProvider) {
|
||||
throw new Error(
|
||||
'No embedding models found, please configure them in the settings page.',
|
||||
);
|
||||
}
|
||||
|
||||
embeddingModelProviderId = embeddingModelProvider.id;
|
||||
|
||||
const embeddingModel =
|
||||
embeddingModelProvider.embeddingModels.find(
|
||||
(m) => m.key === embeddingModelKey,
|
||||
) ?? embeddingModelProvider.embeddingModels[0];
|
||||
embeddingModelKey = embeddingModel.key;
|
||||
|
||||
localStorage.setItem('chatModelKey', chatModelKey);
|
||||
localStorage.setItem('chatModelProviderId', chatModelProviderId);
|
||||
localStorage.setItem('embeddingModelKey', embeddingModelKey);
|
||||
localStorage.setItem('embeddingModelProviderId', embeddingModelProviderId);
|
||||
|
||||
setChatModelProvider({
|
||||
key: chatModelKey,
|
||||
providerId: chatModelProviderId,
|
||||
name: chatModel!,
|
||||
provider: chatModelProvider,
|
||||
});
|
||||
|
||||
setEmbeddingModelProvider({
|
||||
key: embeddingModelKey,
|
||||
providerId: embeddingModelProviderId,
|
||||
name: embeddingModel!,
|
||||
provider: embeddingModelProvider,
|
||||
});
|
||||
|
||||
setIsConfigReady(true);
|
||||
} catch (err: any) {
|
||||
} catch (err) {
|
||||
console.error('An error occurred while checking the configuration:', err);
|
||||
toast.error(err.message);
|
||||
setIsConfigReady(false);
|
||||
setHasError(true);
|
||||
}
|
||||
@@ -295,15 +356,15 @@ export const ChatProvider = ({
|
||||
|
||||
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
|
||||
{
|
||||
key: '',
|
||||
providerId: '',
|
||||
name: '',
|
||||
provider: '',
|
||||
},
|
||||
);
|
||||
|
||||
const [embeddingModelProvider, setEmbeddingModelProvider] =
|
||||
useState<EmbeddingModelProvider>({
|
||||
key: '',
|
||||
providerId: '',
|
||||
name: '',
|
||||
provider: '',
|
||||
});
|
||||
|
||||
const [isConfigReady, setIsConfigReady] = useState(false);
|
||||
@@ -681,12 +742,12 @@ export const ChatProvider = ({
|
||||
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
|
||||
: chatHistory,
|
||||
chatModel: {
|
||||
key: chatModelProvider.key,
|
||||
providerId: chatModelProvider.providerId,
|
||||
name: chatModelProvider.name,
|
||||
provider: chatModelProvider.provider,
|
||||
},
|
||||
embeddingModel: {
|
||||
key: embeddingModelProvider.key,
|
||||
providerId: embeddingModelProvider.providerId,
|
||||
name: embeddingModelProvider.name,
|
||||
provider: embeddingModelProvider.provider,
|
||||
},
|
||||
systemInstructions: localStorage.getItem('systemInstructions'),
|
||||
}),
|
||||
|
@@ -1,45 +0,0 @@
|
||||
import { Embeddings } from '@langchain/core/embeddings';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { Model, ModelList, ProviderMetadata } from '../types';
|
||||
import { UIConfigField } from '@/lib/config/types';
|
||||
|
||||
abstract class BaseModelProvider<CONFIG> {
|
||||
constructor(
|
||||
protected id: string,
|
||||
protected name: string,
|
||||
protected config: CONFIG,
|
||||
) {}
|
||||
abstract getDefaultModels(): Promise<ModelList>;
|
||||
abstract getModelList(): Promise<ModelList>;
|
||||
abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
|
||||
abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;
|
||||
static getProviderConfigFields(): UIConfigField[] {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
static getProviderMetadata(): ProviderMetadata {
|
||||
throw new Error('Method not Implemented.');
|
||||
}
|
||||
static parseAndValidate(raw: any): any {
|
||||
/* Static methods can't access class type parameters */
|
||||
throw new Error('Method not Implemented.');
|
||||
}
|
||||
}
|
||||
|
||||
export type ProviderConstructor<CONFIG> = {
|
||||
new (id: string, name: string, config: CONFIG): BaseModelProvider<CONFIG>;
|
||||
parseAndValidate(raw: any): CONFIG;
|
||||
getProviderConfigFields: () => UIConfigField[];
|
||||
getProviderMetadata: () => ProviderMetadata;
|
||||
};
|
||||
|
||||
export const createProviderInstance = <P extends ProviderConstructor<any>>(
|
||||
Provider: P,
|
||||
id: string,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): InstanceType<P> => {
|
||||
const cfg = Provider.parseAndValidate(rawConfig);
|
||||
return new Provider(id, name, cfg) as InstanceType<P>;
|
||||
};
|
||||
|
||||
export default BaseModelProvider;
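To show how this abstraction is meant to be consumed (the Ollama and OpenAI providers removed later in this diff follow the same pattern), here is a trimmed, hypothetical provider; the EchoProvider name, its config shape, and the model keys are invented for illustration:

import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { UIConfigField } from '@/lib/config/types';
import { ModelList, ProviderMetadata } from '../types';
import BaseModelProvider, { createProviderInstance } from './baseProvider';

interface EchoConfig {
  apiKey: string;
}

class EchoProvider extends BaseModelProvider<EchoConfig> {
  async getDefaultModels(): Promise<ModelList> {
    // Hard-coded catalogue; a real provider would usually fetch this.
    return { chat: [{ name: 'Echo Chat', key: 'echo-chat' }], embedding: [] };
  }
  async getModelList(): Promise<ModelList> {
    return this.getDefaultModels();
  }
  async loadChatModel(key: string): Promise<BaseChatModel> {
    return new ChatOpenAI({ apiKey: this.config.apiKey, model: key });
  }
  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    return new OpenAIEmbeddings({ apiKey: this.config.apiKey, model: key });
  }
  static parseAndValidate(raw: any): EchoConfig {
    if (!raw?.apiKey) throw new Error('apiKey is required');
    return { apiKey: String(raw.apiKey) };
  }
  static getProviderConfigFields(): UIConfigField[] {
    return [];
  }
  static getProviderMetadata(): ProviderMetadata {
    return { key: 'echo', name: 'Echo' };
  }
}

// createProviderInstance validates the raw config first, then constructs the provider.
const echo = createProviderInstance(EchoProvider, 'echo-1', 'Echo', { apiKey: 'sk-test' });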
|
@@ -1,23 +0,0 @@
|
||||
import { ModelProviderUISection } from '@/lib/config/types';
|
||||
import { ProviderConstructor } from './baseProvider';
|
||||
import OpenAIProvider from './openai';
|
||||
import OllamaProvider from './ollama';
|
||||
|
||||
export const providers: Record<string, ProviderConstructor<any>> = {
|
||||
openai: OpenAIProvider,
|
||||
ollama: OllamaProvider,
|
||||
};
|
||||
|
||||
export const getModelProvidersUIConfigSection =
|
||||
(): ModelProviderUISection[] => {
|
||||
return Object.entries(providers).map(([k, p]) => {
|
||||
const configFields = p.getProviderConfigFields();
|
||||
const metadata = p.getProviderMetadata();
|
||||
|
||||
return {
|
||||
fields: configFields,
|
||||
key: k,
|
||||
name: metadata.name,
|
||||
};
|
||||
});
|
||||
};
|
@@ -1,137 +0,0 @@
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { Model, ModelList, ProviderMetadata } from '../types';
|
||||
import BaseModelProvider from './baseProvider';
|
||||
import { ChatOllama, OllamaEmbeddings } from '@langchain/ollama';
|
||||
import { Embeddings } from '@langchain/core/embeddings';
|
||||
import { UIConfigField } from '@/lib/config/types';
|
||||
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
|
||||
|
||||
interface OllamaConfig {
|
||||
baseURL: string;
|
||||
}
|
||||
|
||||
const providerConfigFields: UIConfigField[] = [
|
||||
{
|
||||
type: 'string',
|
||||
name: 'Base URL',
|
||||
key: 'baseURL',
|
||||
description: 'The base URL for the Ollama API',
|
||||
required: true,
|
||||
placeholder: 'Ollama Base URL',
|
||||
default: process.env.DOCKER
|
||||
? 'http://host.docker.internal:11434'
|
||||
: 'http://localhost:11434',
|
||||
env: 'OLLAMA_BASE_URL',
|
||||
scope: 'server',
|
||||
},
|
||||
];
|
||||
|
||||
class OllamaProvider extends BaseModelProvider<OllamaConfig> {
|
||||
constructor(id: string, name: string, config: OllamaConfig) {
|
||||
super(id, name, config);
|
||||
}
|
||||
|
||||
async getDefaultModels(): Promise<ModelList> {
|
||||
try {
|
||||
const res = await fetch(`${this.config.baseURL}/api/tags`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
|
||||
const models: Model[] = data.models.map((m: any) => {
|
||||
return {
|
||||
name: m.name,
|
||||
key: m.model,
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
embedding: models,
|
||||
chat: models,
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof TypeError) {
|
||||
throw new Error(
|
||||
'Error connecting to Ollama API. Please ensure the base URL is correct and the Ollama server is running.',
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async getModelList(): Promise<ModelList> {
|
||||
const defaultModels = await this.getDefaultModels();
|
||||
const configProvider = getConfiguredModelProviderById(this.id)!;
|
||||
|
||||
return {
|
||||
embedding: [
|
||||
...defaultModels.embedding,
|
||||
...configProvider.embeddingModels,
|
||||
],
|
||||
chat: [...defaultModels.chat, ...configProvider.chatModels],
|
||||
};
|
||||
}
|
||||
|
||||
async loadChatModel(key: string): Promise<BaseChatModel> {
|
||||
const modelList = await this.getModelList();
|
||||
|
||||
const exists = modelList.chat.find((m) => m.key === key);
|
||||
|
||||
if (!exists) {
|
||||
throw new Error(
|
||||
'Error Loading Ollama Chat Model. Invalid Model Selected',
|
||||
);
|
||||
}
|
||||
|
||||
return new ChatOllama({
|
||||
temperature: 0.7,
|
||||
model: key,
|
||||
baseUrl: this.config.baseURL,
|
||||
});
|
||||
}
|
||||
|
||||
async loadEmbeddingModel(key: string): Promise<Embeddings> {
|
||||
const modelList = await this.getModelList();
|
||||
const exists = modelList.embedding.find((m) => m.key === key);
|
||||
|
||||
if (!exists) {
|
||||
throw new Error(
|
||||
'Error Loading Ollama Embedding Model. Invalid Model Selected.',
|
||||
);
|
||||
}
|
||||
|
||||
return new OllamaEmbeddings({
|
||||
model: key,
|
||||
baseUrl: this.config.baseURL,
|
||||
});
|
||||
}
|
||||
|
||||
static parseAndValidate(raw: any): OllamaConfig {
|
||||
if (!raw || typeof raw !== 'object')
|
||||
throw new Error('Invalid config provided. Expected object');
|
||||
if (!raw.baseURL)
|
||||
throw new Error('Invalid config provided. Base URL must be provided');
|
||||
|
||||
return {
|
||||
baseURL: String(raw.baseURL),
|
||||
};
|
||||
}
|
||||
|
||||
static getProviderConfigFields(): UIConfigField[] {
|
||||
return providerConfigFields;
|
||||
}
|
||||
|
||||
static getProviderMetadata(): ProviderMetadata {
|
||||
return {
|
||||
key: 'ollama',
|
||||
name: 'Ollama',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default OllamaProvider;
|
@@ -1,214 +0,0 @@
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { Model, ModelList, ProviderMetadata } from '../types';
|
||||
import BaseModelProvider from './baseProvider';
|
||||
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
||||
import { Embeddings } from '@langchain/core/embeddings';
|
||||
import { UIConfigField } from '@/lib/config/types';
|
||||
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
|
||||
|
||||
interface OpenAIConfig {
|
||||
apiKey: string;
|
||||
baseURL: string;
|
||||
}
|
||||
|
||||
const defaultChatModels: Model[] = [
|
||||
{
|
||||
name: 'GPT-3.5 Turbo',
|
||||
key: 'gpt-3.5-turbo',
|
||||
},
|
||||
{
|
||||
name: 'GPT-4',
|
||||
key: 'gpt-4',
|
||||
},
|
||||
{
|
||||
name: 'GPT-4 turbo',
|
||||
key: 'gpt-4-turbo',
|
||||
},
|
||||
{
|
||||
name: 'GPT-4 omni',
|
||||
key: 'gpt-4o',
|
||||
},
|
||||
{
|
||||
name: 'GPT-4o (2024-05-13)',
|
||||
key: 'gpt-4o-2024-05-13',
|
||||
},
|
||||
{
|
||||
name: 'GPT-4 omni mini',
|
||||
key: 'gpt-4o-mini',
|
||||
},
|
||||
{
|
||||
name: 'GPT 4.1 nano',
|
||||
key: 'gpt-4.1-nano',
|
||||
},
|
||||
{
|
||||
name: 'GPT 4.1 mini',
|
||||
key: 'gpt-4.1-mini',
|
||||
},
|
||||
{
|
||||
name: 'GPT 4.1',
|
||||
key: 'gpt-4.1',
|
||||
},
|
||||
{
|
||||
name: 'GPT 5 nano',
|
||||
key: 'gpt-5-nano',
|
||||
},
|
||||
{
|
||||
name: 'GPT 5',
|
||||
key: 'gpt-5',
|
||||
},
|
||||
{
|
||||
name: 'GPT 5 Mini',
|
||||
key: 'gpt-5-mini',
|
||||
},
|
||||
{
|
||||
name: 'o1',
|
||||
key: 'o1',
|
||||
},
|
||||
{
|
||||
name: 'o3',
|
||||
key: 'o3',
|
||||
},
|
||||
{
|
||||
name: 'o3 Mini',
|
||||
key: 'o3-mini',
|
||||
},
|
||||
{
|
||||
name: 'o4 Mini',
|
||||
key: 'o4-mini',
|
||||
},
|
||||
];
|
||||
|
||||
const defaultEmbeddingModels: Model[] = [
|
||||
{
|
||||
name: 'Text Embedding 3 Small',
|
||||
key: 'text-embedding-3-small',
|
||||
},
|
||||
{
|
||||
name: 'Text Embedding 3 Large',
|
||||
key: 'text-embedding-3-large',
|
||||
},
|
||||
];
|
||||
|
||||
const providerConfigFields: UIConfigField[] = [
|
||||
{
|
||||
type: 'password',
|
||||
name: 'API Key',
|
||||
key: 'apiKey',
|
||||
description: 'Your OpenAI API key',
|
||||
required: true,
|
||||
placeholder: 'OpenAI API Key',
|
||||
env: 'OPENAI_API_KEY',
|
||||
scope: 'server',
|
||||
},
|
||||
{
|
||||
type: 'string',
|
||||
name: 'Base URL',
|
||||
key: 'baseURL',
|
||||
description: 'The base URL for the OpenAI API',
|
||||
required: true,
|
||||
placeholder: 'OpenAI Base URL',
|
||||
default: 'https://api.openai.com/v1',
|
||||
env: 'OPENAI_BASE_URL',
|
||||
scope: 'server',
|
||||
},
|
||||
];
|
||||
|
||||
class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
|
||||
constructor(id: string, name: string, config: OpenAIConfig) {
|
||||
super(id, name, config);
|
||||
}
|
||||
|
||||
async getDefaultModels(): Promise<ModelList> {
|
||||
if (this.config.baseURL === 'https://api.openai.com/v1') {
|
||||
return {
|
||||
embedding: defaultEmbeddingModels,
|
||||
chat: defaultChatModels,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
embedding: [],
|
||||
chat: [],
|
||||
};
|
||||
}
|
||||
|
||||
async getModelList(): Promise<ModelList> {
|
||||
const defaultModels = await this.getDefaultModels();
|
||||
const configProvider = getConfiguredModelProviderById(this.id)!;
|
||||
|
||||
return {
|
||||
embedding: [
|
||||
...defaultModels.embedding,
|
||||
...configProvider.embeddingModels,
|
||||
],
|
||||
chat: [...defaultModels.chat, ...configProvider.chatModels],
|
||||
};
|
||||
}
|
||||
|
||||
async loadChatModel(key: string): Promise<BaseChatModel> {
|
||||
const modelList = await this.getModelList();
|
||||
|
||||
const exists = modelList.chat.find((m) => m.key === key);
|
||||
|
||||
if (!exists) {
|
||||
throw new Error(
|
||||
'Error Loading OpenAI Chat Model. Invalid Model Selected',
|
||||
);
|
||||
}
|
||||
|
||||
return new ChatOpenAI({
|
||||
apiKey: this.config.apiKey,
|
||||
temperature: 0.7,
|
||||
model: key,
|
||||
configuration: {
|
||||
baseURL: this.config.baseURL,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async loadEmbeddingModel(key: string): Promise<Embeddings> {
|
||||
const modelList = await this.getModelList();
|
||||
const exists = modelList.embedding.find((m) => m.key === key);
|
||||
|
||||
if (!exists) {
|
||||
throw new Error(
|
||||
'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
|
||||
);
|
||||
}
|
||||
|
||||
return new OpenAIEmbeddings({
|
||||
apiKey: this.config.apiKey,
|
||||
model: key,
|
||||
configuration: {
|
||||
baseURL: this.config.baseURL,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
static parseAndValidate(raw: any): OpenAIConfig {
|
||||
if (!raw || typeof raw !== 'object')
|
||||
throw new Error('Invalid config provided. Expected object');
|
||||
if (!raw.apiKey || !raw.baseURL)
|
||||
throw new Error(
|
||||
'Invalid config provided. API key and base URL must be provided',
|
||||
);
|
||||
|
||||
return {
|
||||
apiKey: String(raw.apiKey),
|
||||
baseURL: String(raw.baseURL),
|
||||
};
|
||||
}
|
||||
|
||||
static getProviderConfigFields(): UIConfigField[] {
|
||||
return providerConfigFields;
|
||||
}
|
||||
|
||||
static getProviderMetadata(): ProviderMetadata {
|
||||
return {
|
||||
key: 'openai',
|
||||
name: 'OpenAI',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default OpenAIProvider;
|
@@ -1,223 +0,0 @@
|
||||
import { ConfigModelProvider } from '../config/types';
|
||||
import BaseModelProvider, {
|
||||
createProviderInstance,
|
||||
} from './providers/baseProvider';
|
||||
import { getConfiguredModelProviders } from '../config/serverRegistry';
|
||||
import { providers } from './providers';
|
||||
import { MinimalProvider, ModelList } from './types';
|
||||
import configManager from '../config';
|
||||
|
||||
class ModelRegistry {
|
||||
activeProviders: (ConfigModelProvider & {
|
||||
provider: BaseModelProvider<any>;
|
||||
})[] = [];
|
||||
|
||||
constructor() {
|
||||
this.initializeActiveProviders();
|
||||
}
|
||||
|
||||
private initializeActiveProviders() {
|
||||
const configuredProviders = getConfiguredModelProviders();
|
||||
|
||||
configuredProviders.forEach((p) => {
|
||||
try {
|
||||
const provider = providers[p.type];
|
||||
if (!provider) throw new Error('Invalid provider type');
|
||||
|
||||
this.activeProviders.push({
|
||||
...p,
|
||||
provider: createProviderInstance(provider, p.id, p.name, p.config),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`Failed to initialize provider. Type: ${p.type}, ID: ${p.id}, Config: ${JSON.stringify(p.config)}, Error: ${err}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async getActiveProviders() {
|
||||
const providers: MinimalProvider[] = [];
|
||||
|
||||
await Promise.all(
|
||||
this.activeProviders.map(async (p) => {
|
||||
let m: ModelList = { chat: [], embedding: [] };
|
||||
|
||||
try {
|
||||
m = await p.provider.getModelList();
|
||||
} catch (err: any) {
|
||||
console.error(
|
||||
`Failed to get model list. Type: ${p.type}, ID: ${p.id}, Error: ${err.message}`,
|
||||
);
|
||||
|
||||
m = {
|
||||
chat: [
|
||||
{
|
||||
key: 'error',
|
||||
name: err.message,
|
||||
},
|
||||
],
|
||||
embedding: [],
|
||||
};
|
||||
}
|
||||
|
||||
providers.push({
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
chatModels: m.chat,
|
||||
embeddingModels: m.embedding,
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
return providers;
|
||||
}
|
||||
|
||||
async loadChatModel(providerId: string, modelName: string) {
|
||||
const provider = this.activeProviders.find((p) => p.id === providerId);
|
||||
|
||||
if (!provider) throw new Error('Invalid provider id');
|
||||
|
||||
const model = await provider.provider.loadChatModel(modelName);
|
||||
|
||||
return model;
|
||||
}
|
||||
|
||||
async loadEmbeddingModel(providerId: string, modelName: string) {
|
||||
const provider = this.activeProviders.find((p) => p.id === providerId);
|
||||
|
||||
if (!provider) throw new Error('Invalid provider id');
|
||||
|
||||
const model = await provider.provider.loadEmbeddingModel(modelName);
|
||||
|
||||
return model;
|
||||
}
|
||||
|
||||
async addProvider(
|
||||
type: string,
|
||||
name: string,
|
||||
config: Record<string, any>,
|
||||
): Promise<ConfigModelProvider> {
|
||||
const provider = providers[type];
|
||||
if (!provider) throw new Error('Invalid provider type');
|
||||
|
||||
const newProvider = configManager.addModelProvider(type, name, config);
|
||||
|
||||
const instance = createProviderInstance(
|
||||
provider,
|
||||
newProvider.id,
|
||||
newProvider.name,
|
||||
newProvider.config,
|
||||
);
|
||||
|
||||
let m: ModelList = { chat: [], embedding: [] };
|
||||
|
||||
try {
|
||||
m = await instance.getModelList();
|
||||
} catch (err: any) {
|
||||
console.error(
|
||||
`Failed to get model list for newly added provider. Type: ${type}, ID: ${newProvider.id}, Error: ${err.message}`,
|
||||
);
|
||||
|
||||
m = {
|
||||
chat: [
|
||||
{
|
||||
key: 'error',
|
||||
name: err.message,
|
||||
},
|
||||
],
|
||||
embedding: [],
|
||||
};
|
||||
}
|
||||
|
||||
this.activeProviders.push({
|
||||
...newProvider,
|
||||
provider: instance,
|
||||
});
|
||||
|
||||
return {
|
||||
...newProvider,
|
||||
chatModels: m.chat || [],
|
||||
embeddingModels: m.embedding || [],
|
||||
};
|
||||
}
|
||||
|
||||
async removeProvider(providerId: string): Promise<void> {
|
||||
configManager.removeModelProvider(providerId);
|
||||
this.activeProviders = this.activeProviders.filter(
|
||||
(p) => p.id !== providerId,
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
async updateProvider(
|
||||
providerId: string,
|
||||
name: string,
|
||||
config: any,
|
||||
): Promise<ConfigModelProvider> {
|
||||
const updated = await configManager.updateModelProvider(
|
||||
providerId,
|
||||
name,
|
||||
config,
|
||||
);
|
||||
const instance = createProviderInstance(
|
||||
providers[updated.type],
|
||||
providerId,
|
||||
name,
|
||||
config,
|
||||
);
|
||||
|
||||
let m: ModelList = { chat: [], embedding: [] };
|
||||
|
||||
try {
|
||||
m = await instance.getModelList();
|
||||
} catch (err: any) {
|
||||
console.error(
|
||||
`Failed to get model list for updated provider. Type: ${updated.type}, ID: ${updated.id}, Error: ${err.message}`,
|
||||
);
|
||||
|
||||
m = {
|
||||
chat: [
|
||||
{
|
||||
key: 'error',
|
||||
name: err.message,
|
||||
},
|
||||
],
|
||||
embedding: [],
|
||||
};
|
||||
}
|
||||
|
||||
this.activeProviders.push({
|
||||
...updated,
|
||||
provider: instance,
|
||||
});
|
||||
|
||||
return {
|
||||
...updated,
|
||||
chatModels: m.chat || [],
|
||||
embeddingModels: m.embedding || [],
|
||||
};
|
||||
}
|
||||
|
||||
/* Using async here because maybe in the future we might want to add some validation?? */
|
||||
async addProviderModel(
|
||||
providerId: string,
|
||||
type: 'embedding' | 'chat',
|
||||
model: any,
|
||||
): Promise<any> {
|
||||
const addedModel = configManager.addProviderModel(providerId, type, model);
|
||||
return addedModel;
|
||||
}
|
||||
|
||||
async removeProviderModel(
|
||||
providerId: string,
|
||||
type: 'embedding' | 'chat',
|
||||
modelKey: string,
|
||||
): Promise<void> {
|
||||
configManager.removeProviderModel(providerId, type, modelKey);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
export default ModelRegistry;
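A brief usage sketch of the registry above, e.g. from an API route; the import path and the async wrapper are assumptions, and the available model keys depend on which providers are configured:

import ModelRegistry from '@/lib/models/registry'; // path assumed

const demo = async () => {
  const registry = new ModelRegistry();

  // Every configured provider together with the chat/embedding models it currently exposes.
  const providers = await registry.getActiveProviders();

  // Load a chat model by provider id + model key, mirroring the providerId/key pair
  // the client now persists in localStorage.
  const withChat = providers.find((p) => p.chatModels.length > 0);
  if (withChat) {
    const chatModel = await registry.loadChatModel(withChat.id, withChat.chatModels[0].key);
    return chatModel;
  }
};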
|
@@ -1,34 +0,0 @@
|
||||
type Model = {
|
||||
name: string;
|
||||
key: string;
|
||||
};
|
||||
|
||||
type ModelList = {
|
||||
embedding: Model[];
|
||||
chat: Model[];
|
||||
};
|
||||
|
||||
type ProviderMetadata = {
|
||||
name: string;
|
||||
key: string;
|
||||
};
|
||||
|
||||
type MinimalProvider = {
|
||||
id: string;
|
||||
name: string;
|
||||
chatModels: Model[];
|
||||
embeddingModels: Model[];
|
||||
};
|
||||
|
||||
type ModelWithProvider = {
|
||||
key: string;
|
||||
providerId: string;
|
||||
};
|
||||
|
||||
export type {
|
||||
Model,
|
||||
ModelList,
|
||||
ProviderMetadata,
|
||||
MinimalProvider,
|
||||
ModelWithProvider,
|
||||
};
|
94
src/lib/providers/aimlapi.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
||||
import { getAimlApiKey } from '../config';
|
||||
import { ChatModel, EmbeddingModel } from '.';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { Embeddings } from '@langchain/core/embeddings';
|
||||
import axios from 'axios';
|
||||
|
||||
export const PROVIDER_INFO = {
|
||||
key: 'aimlapi',
|
||||
displayName: 'AI/ML API',
|
||||
};
|
||||
|
||||
interface AimlApiModel {
|
||||
id: string;
|
||||
name?: string;
|
||||
type?: string;
|
||||
}
|
||||
|
||||
const API_URL = 'https://api.aimlapi.com';
|
||||
|
||||
export const loadAimlApiChatModels = async () => {
|
||||
const apiKey = getAimlApiKey();
|
||||
|
||||
if (!apiKey) return {};
|
||||
|
||||
try {
|
||||
const response = await axios.get(`${API_URL}/models`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
});
|
||||
|
||||
const chatModels: Record<string, ChatModel> = {};
|
||||
|
||||
response.data.data.forEach((model: AimlApiModel) => {
|
||||
if (model.type === 'chat-completion') {
|
||||
chatModels[model.id] = {
|
||||
displayName: model.name || model.id,
|
||||
model: new ChatOpenAI({
|
||||
apiKey: apiKey,
|
||||
modelName: model.id,
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: API_URL,
|
||||
},
|
||||
}) as unknown as BaseChatModel,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
return chatModels;
|
||||
} catch (err) {
|
||||
console.error(`Error loading AI/ML API models: ${err}`);
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
export const loadAimlApiEmbeddingModels = async () => {
|
||||
const apiKey = getAimlApiKey();
|
||||
|
||||
if (!apiKey) return {};
|
||||
|
||||
try {
|
||||
const response = await axios.get(`${API_URL}/models`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
});
|
||||
|
||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
||||
|
||||
response.data.data.forEach((model: AimlApiModel) => {
|
||||
if (model.type === 'embedding') {
|
||||
embeddingModels[model.id] = {
|
||||
displayName: model.name || model.id,
|
||||
model: new OpenAIEmbeddings({
|
||||
apiKey: apiKey,
|
||||
modelName: model.id,
|
||||
configuration: {
|
||||
baseURL: API_URL,
|
||||
},
|
||||
}) as unknown as Embeddings,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
return embeddingModels;
|
||||
} catch (err) {
|
||||
console.error(`Error loading AI/ML API embeddings models: ${err}`);
|
||||
return {};
|
||||
}
|
||||
};
|
78 src/lib/providers/anthropic.ts Normal file
@@ -0,0 +1,78 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatModel } from '.';
import { getAnthropicApiKey } from '../config';

export const PROVIDER_INFO = {
  key: 'anthropic',
  displayName: 'Anthropic',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

const anthropicChatModels: Record<string, string>[] = [
  {
    displayName: 'Claude 4.1 Opus',
    key: 'claude-opus-4-1-20250805',
  },
  {
    displayName: 'Claude 4 Opus',
    key: 'claude-opus-4-20250514',
  },
  {
    displayName: 'Claude 4 Sonnet',
    key: 'claude-sonnet-4-20250514',
  },
  {
    displayName: 'Claude 3.7 Sonnet',
    key: 'claude-3-7-sonnet-20250219',
  },
  {
    displayName: 'Claude 3.5 Haiku',
    key: 'claude-3-5-haiku-20241022',
  },
  {
    displayName: 'Claude 3.5 Sonnet v2',
    key: 'claude-3-5-sonnet-20241022',
  },
  {
    displayName: 'Claude 3.5 Sonnet',
    key: 'claude-3-5-sonnet-20240620',
  },
  {
    displayName: 'Claude 3 Opus',
    key: 'claude-3-opus-20240229',
  },
  {
    displayName: 'Claude 3 Sonnet',
    key: 'claude-3-sonnet-20240229',
  },
  {
    displayName: 'Claude 3 Haiku',
    key: 'claude-3-haiku-20240307',
  },
];

export const loadAnthropicChatModels = async () => {
  const anthropicApiKey = getAnthropicApiKey();

  if (!anthropicApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    anthropicChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatAnthropic({
          apiKey: anthropicApiKey,
          modelName: model.key,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Anthropic models: ${err}`);
    return {};
  }
};
49 src/lib/providers/deepseek.ts Normal file
@@ -0,0 +1,49 @@
import { ChatOpenAI } from '@langchain/openai';
import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

export const PROVIDER_INFO = {
  key: 'deepseek',
  displayName: 'Deepseek AI',
};

const deepseekChatModels: Record<string, string>[] = [
  {
    displayName: 'Deepseek Chat (Deepseek V3)',
    key: 'deepseek-chat',
  },
  {
    displayName: 'Deepseek Reasoner (Deepseek R1)',
    key: 'deepseek-reasoner',
  },
];

export const loadDeepseekChatModels = async () => {
  const deepseekApiKey = getDeepseekApiKey();

  if (!deepseekApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    deepseekChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatOpenAI({
          apiKey: deepseekApiKey,
          modelName: model.key,
          temperature: 0.7,
          configuration: {
            baseURL: 'https://api.deepseek.com',
          },
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Deepseek models: ${err}`);
    return {};
  }
};
114 src/lib/providers/gemini.ts Normal file
@@ -0,0 +1,114 @@
import {
  ChatGoogleGenerativeAI,
  GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { getGeminiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'gemini',
  displayName: 'Google Gemini',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

const geminiChatModels: Record<string, string>[] = [
  {
    displayName: 'Gemini 2.5 Flash',
    key: 'gemini-2.5-flash',
  },
  {
    displayName: 'Gemini 2.5 Flash-Lite',
    key: 'gemini-2.5-flash-lite',
  },
  {
    displayName: 'Gemini 2.5 Pro',
    key: 'gemini-2.5-pro',
  },
  {
    displayName: 'Gemini 2.0 Flash',
    key: 'gemini-2.0-flash',
  },
  {
    displayName: 'Gemini 2.0 Flash-Lite',
    key: 'gemini-2.0-flash-lite',
  },
  {
    displayName: 'Gemini 2.0 Flash Thinking Experimental',
    key: 'gemini-2.0-flash-thinking-exp-01-21',
  },
  {
    displayName: 'Gemini 1.5 Flash',
    key: 'gemini-1.5-flash',
  },
  {
    displayName: 'Gemini 1.5 Flash-8B',
    key: 'gemini-1.5-flash-8b',
  },
  {
    displayName: 'Gemini 1.5 Pro',
    key: 'gemini-1.5-pro',
  },
];

const geminiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 004',
    key: 'models/text-embedding-004',
  },
  {
    displayName: 'Embedding 001',
    key: 'models/embedding-001',
  },
];

export const loadGeminiChatModels = async () => {
  const geminiApiKey = getGeminiApiKey();

  if (!geminiApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    geminiChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatGoogleGenerativeAI({
          apiKey: geminiApiKey,
          model: model.key,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Gemini models: ${err}`);
    return {};
  }
};

export const loadGeminiEmbeddingModels = async () => {
  const geminiApiKey = getGeminiApiKey();

  if (!geminiApiKey) return {};

  try {
    const embeddingModels: Record<string, EmbeddingModel> = {};

    geminiEmbeddingModels.forEach((model) => {
      embeddingModels[model.key] = {
        displayName: model.displayName,
        model: new GoogleGenerativeAIEmbeddings({
          apiKey: geminiApiKey,
          modelName: model.key,
        }) as unknown as Embeddings,
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Gemini embeddings models: ${err}`);
    return {};
  }
};
44 src/lib/providers/groq.ts Normal file
@@ -0,0 +1,44 @@
import { ChatGroq } from '@langchain/groq';
import { getGroqApiKey } from '../config';
import { ChatModel } from '.';

export const PROVIDER_INFO = {
  key: 'groq',
  displayName: 'Groq',
};

import { BaseChatModel } from '@langchain/core/language_models/chat_models';

export const loadGroqChatModels = async () => {
  const groqApiKey = getGroqApiKey();
  if (!groqApiKey) return {};

  try {
    const res = await fetch('https://api.groq.com/openai/v1/models', {
      method: 'GET',
      headers: {
        Authorization: `bearer ${groqApiKey}`,
        'Content-Type': 'application/json',
      },
    });

    const groqChatModels = (await res.json()).data;
    const chatModels: Record<string, ChatModel> = {};

    groqChatModels.forEach((model: any) => {
      chatModels[model.id] = {
        displayName: model.id,
        model: new ChatGroq({
          apiKey: groqApiKey,
          model: model.id,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Groq models: ${err}`);
    return {};
  }
};
170 src/lib/providers/index.ts Normal file
@@ -0,0 +1,170 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import {
  loadOpenAIChatModels,
  loadOpenAIEmbeddingModels,
  PROVIDER_INFO as OpenAIInfo,
  PROVIDER_INFO,
} from './openai';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '../config';
import { ChatOpenAI } from '@langchain/openai';
import {
  loadOllamaChatModels,
  loadOllamaEmbeddingModels,
  PROVIDER_INFO as OllamaInfo,
} from './ollama';
import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
import {
  loadAnthropicChatModels,
  PROVIDER_INFO as AnthropicInfo,
} from './anthropic';
import {
  loadGeminiChatModels,
  loadGeminiEmbeddingModels,
  PROVIDER_INFO as GeminiInfo,
} from './gemini';
import {
  loadTransformersEmbeddingsModels,
  PROVIDER_INFO as TransformersInfo,
} from './transformers';
import {
  loadDeepseekChatModels,
  PROVIDER_INFO as DeepseekInfo,
} from './deepseek';
import {
  loadAimlApiChatModels,
  loadAimlApiEmbeddingModels,
  PROVIDER_INFO as AimlApiInfo,
} from './aimlapi';
import {
  loadLMStudioChatModels,
  loadLMStudioEmbeddingsModels,
  PROVIDER_INFO as LMStudioInfo,
} from './lmstudio';
import {
  loadLemonadeChatModels,
  loadLemonadeEmbeddingModels,
  PROVIDER_INFO as LemonadeInfo,
} from './lemonade';

export const PROVIDER_METADATA = {
  openai: OpenAIInfo,
  ollama: OllamaInfo,
  groq: GroqInfo,
  anthropic: AnthropicInfo,
  gemini: GeminiInfo,
  transformers: TransformersInfo,
  deepseek: DeepseekInfo,
  aimlapi: AimlApiInfo,
  lmstudio: LMStudioInfo,
  lemonade: LemonadeInfo,
  custom_openai: {
    key: 'custom_openai',
    displayName: 'Custom OpenAI',
  },
};

export interface ChatModel {
  displayName: string;
  model: BaseChatModel;
}

export interface EmbeddingModel {
  displayName: string;
  model: Embeddings;
}

export const chatModelProviders: Record<
  string,
  () => Promise<Record<string, ChatModel>>
> = {
  openai: loadOpenAIChatModels,
  ollama: loadOllamaChatModels,
  groq: loadGroqChatModels,
  anthropic: loadAnthropicChatModels,
  gemini: loadGeminiChatModels,
  deepseek: loadDeepseekChatModels,
  aimlapi: loadAimlApiChatModels,
  lmstudio: loadLMStudioChatModels,
  lemonade: loadLemonadeChatModels,
};

export const embeddingModelProviders: Record<
  string,
  () => Promise<Record<string, EmbeddingModel>>
> = {
  openai: loadOpenAIEmbeddingModels,
  ollama: loadOllamaEmbeddingModels,
  gemini: loadGeminiEmbeddingModels,
  transformers: loadTransformersEmbeddingsModels,
  aimlapi: loadAimlApiEmbeddingModels,
  lmstudio: loadLMStudioEmbeddingsModels,
  lemonade: loadLemonadeEmbeddingModels,
};

export const getAvailableChatModelProviders = async () => {
  const models: Record<string, Record<string, ChatModel>> = {};

  for (const provider in chatModelProviders) {
    const providerModels = await chatModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  const customOpenAiApiKey = getCustomOpenaiApiKey();
  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
  const customOpenAiModelName = getCustomOpenaiModelName();

  models['custom_openai'] = {
    ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
      ? {
          [customOpenAiModelName]: {
            displayName: customOpenAiModelName,
            model: new ChatOpenAI({
              apiKey: customOpenAiApiKey,
              modelName: customOpenAiModelName,
              ...(() => {
                const temperatureRestrictedModels = [
                  'gpt-5-nano',
                  'gpt-5',
                  'gpt-5-mini',
                  'o1',
                  'o3',
                  'o3-mini',
                  'o4-mini',
                ];
                const isTemperatureRestricted =
                  temperatureRestrictedModels.some((restrictedModel) =>
                    customOpenAiModelName.includes(restrictedModel),
                  );
                return isTemperatureRestricted ? {} : { temperature: 0.7 };
              })(),
              configuration: {
                baseURL: customOpenAiApiUrl,
              },
            }) as unknown as BaseChatModel,
          },
        }
      : {}),
  };

  return models;
};

export const getAvailableEmbeddingModelProviders = async () => {
  const models: Record<string, Record<string, EmbeddingModel>> = {};

  for (const provider in embeddingModelProviders) {
    const providerModels = await embeddingModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  return models;
};
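A minimal sketch of how the aggregated registry in this new index.ts might be consumed. The '@/lib/providers' import alias and the provider/model keys are assumptions for illustration; which providers actually appear in the map depends on the API keys configured at runtime.

import { getAvailableChatModelProviders } from '@/lib/providers'; // hypothetical alias path

// Pick a specific provider/model pair from the aggregated map, or fall back
// to the first model that loaded successfully.
const pickChatModel = async (providerKey: string, modelKey: string) => {
  const providers = await getAvailableChatModelProviders();
  const provider = providers[providerKey];

  if (provider && provider[modelKey]) {
    return provider[modelKey].model; // a LangChain BaseChatModel instance
  }

  for (const models of Object.values(providers)) {
    const firstKey = Object.keys(models)[0];
    if (firstKey) return models[firstKey].model;
  }

  throw new Error('No chat model providers are configured');
};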
94 src/lib/providers/lemonade.ts Normal file
@@ -0,0 +1,94 @@
import axios from 'axios';
import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'lemonade',
  displayName: 'Lemonade',
};

import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';

export const loadLemonadeChatModels = async () => {
  const lemonadeApiEndpoint = getLemonadeApiEndpoint();
  const lemonadeApiKey = getLemonadeApiKey();

  if (!lemonadeApiEndpoint) return {};

  try {
    const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
      headers: {
        'Content-Type': 'application/json',
        ...(lemonadeApiKey
          ? { Authorization: `Bearer ${lemonadeApiKey}` }
          : {}),
      },
    });

    const { data: models } = res.data;

    const chatModels: Record<string, ChatModel> = {};

    models.forEach((model: any) => {
      chatModels[model.id] = {
        displayName: model.id,
        model: new ChatOpenAI({
          apiKey: lemonadeApiKey || 'lemonade-key',
          modelName: model.id,
          temperature: 0.7,
          configuration: {
            baseURL: `${lemonadeApiEndpoint}/api/v1`,
          },
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Lemonade models: ${err}`);
    return {};
  }
};

export const loadLemonadeEmbeddingModels = async () => {
  const lemonadeApiEndpoint = getLemonadeApiEndpoint();
  const lemonadeApiKey = getLemonadeApiKey();

  if (!lemonadeApiEndpoint) return {};

  try {
    const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
      headers: {
        'Content-Type': 'application/json',
        ...(lemonadeApiKey
          ? { Authorization: `Bearer ${lemonadeApiKey}` }
          : {}),
      },
    });

    const { data: models } = res.data;

    const embeddingModels: Record<string, EmbeddingModel> = {};

    // Filter models that support embeddings (if Lemonade provides this info)
    // For now, we'll assume all models can be used for embeddings
    models.forEach((model: any) => {
      embeddingModels[model.id] = {
        displayName: model.id,
        model: new OpenAIEmbeddings({
          apiKey: lemonadeApiKey || 'lemonade-key',
          modelName: model.id,
          configuration: {
            baseURL: `${lemonadeApiEndpoint}/api/v1`,
          },
        }),
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Lemonade embedding models: ${err}`);
    return {};
  }
};
100 src/lib/providers/lmstudio.ts Normal file
@@ -0,0 +1,100 @@
import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
import axios from 'axios';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'lmstudio',
  displayName: 'LM Studio',
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

interface LMStudioModel {
  id: string;
  name?: string;
}

const ensureV1Endpoint = (endpoint: string): string =>
  endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;

const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
  try {
    await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });
    return true;
  } catch {
    return false;
  }
};

export const loadLMStudioChatModels = async () => {
  const endpoint = getLMStudioApiEndpoint();

  if (!endpoint) return {};
  if (!(await checkServerAvailability(endpoint))) return {};

  try {
    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });

    const chatModels: Record<string, ChatModel> = {};

    response.data.data.forEach((model: LMStudioModel) => {
      chatModels[model.id] = {
        displayName: model.name || model.id,
        model: new ChatOpenAI({
          apiKey: 'lm-studio',
          configuration: {
            baseURL: ensureV1Endpoint(endpoint),
          },
          modelName: model.id,
          temperature: 0.7,
          streaming: true,
          maxRetries: 3,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading LM Studio models: ${err}`);
    return {};
  }
};

export const loadLMStudioEmbeddingsModels = async () => {
  const endpoint = getLMStudioApiEndpoint();

  if (!endpoint) return {};
  if (!(await checkServerAvailability(endpoint))) return {};

  try {
    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });

    const embeddingsModels: Record<string, EmbeddingModel> = {};

    response.data.data.forEach((model: LMStudioModel) => {
      embeddingsModels[model.id] = {
        displayName: model.name || model.id,
        model: new OpenAIEmbeddings({
          apiKey: 'lm-studio',
          configuration: {
            baseURL: ensureV1Endpoint(endpoint),
          },
          modelName: model.id,
        }) as unknown as Embeddings,
      };
    });

    return embeddingsModels;
  } catch (err) {
    console.error(`Error loading LM Studio embeddings model: ${err}`);
    return {};
  }
};
86 src/lib/providers/ollama.ts Normal file
@@ -0,0 +1,86 @@
import axios from 'axios';
import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'ollama',
  displayName: 'Ollama',
};
import { ChatOllama } from '@langchain/ollama';
import { OllamaEmbeddings } from '@langchain/ollama';

export const loadOllamaChatModels = async () => {
  const ollamaApiEndpoint = getOllamaApiEndpoint();
  const ollamaApiKey = getOllamaApiKey();

  if (!ollamaApiEndpoint) return {};

  try {
    const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
      headers: {
        'Content-Type': 'application/json',
      },
    });

    const { models } = res.data;

    const chatModels: Record<string, ChatModel> = {};

    models.forEach((model: any) => {
      chatModels[model.model] = {
        displayName: model.name,
        model: new ChatOllama({
          baseUrl: ollamaApiEndpoint,
          model: model.model,
          temperature: 0.7,
          keepAlive: getKeepAlive(),
          ...(ollamaApiKey
            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
            : {}),
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Ollama models: ${err}`);
    return {};
  }
};

export const loadOllamaEmbeddingModels = async () => {
  const ollamaApiEndpoint = getOllamaApiEndpoint();
  const ollamaApiKey = getOllamaApiKey();

  if (!ollamaApiEndpoint) return {};

  try {
    const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
      headers: {
        'Content-Type': 'application/json',
      },
    });

    const { models } = res.data;

    const embeddingModels: Record<string, EmbeddingModel> = {};

    models.forEach((model: any) => {
      embeddingModels[model.model] = {
        displayName: model.name,
        model: new OllamaEmbeddings({
          baseUrl: ollamaApiEndpoint,
          model: model.model,
          ...(ollamaApiKey
            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
            : {}),
        }),
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Ollama embeddings models: ${err}`);
    return {};
  }
};
159 src/lib/providers/openai.ts Normal file
@@ -0,0 +1,159 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getOpenaiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'openai',
  displayName: 'OpenAI',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

const openaiChatModels: Record<string, string>[] = [
  {
    displayName: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    displayName: 'GPT-4',
    key: 'gpt-4',
  },
  {
    displayName: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    displayName: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    displayName: 'GPT-4o (2024-05-13)',
    key: 'gpt-4o-2024-05-13',
  },
  {
    displayName: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
  {
    displayName: 'GPT 4.1 nano',
    key: 'gpt-4.1-nano',
  },
  {
    displayName: 'GPT 4.1 mini',
    key: 'gpt-4.1-mini',
  },
  {
    displayName: 'GPT 4.1',
    key: 'gpt-4.1',
  },
  {
    displayName: 'GPT 5 nano',
    key: 'gpt-5-nano',
  },
  {
    displayName: 'GPT 5',
    key: 'gpt-5',
  },
  {
    displayName: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    displayName: 'o1',
    key: 'o1',
  },
  {
    displayName: 'o3',
    key: 'o3',
  },
  {
    displayName: 'o3 Mini',
    key: 'o3-mini',
  },
  {
    displayName: 'o4 Mini',
    key: 'o4-mini',
  },
];

const openaiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    displayName: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];

export const loadOpenAIChatModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    openaiChatModels.forEach((model) => {
      // Models that only support temperature = 1
      const temperatureRestrictedModels = [
        'gpt-5-nano',
        'gpt-5',
        'gpt-5-mini',
        'o1',
        'o3',
        'o3-mini',
        'o4-mini',
      ];
      const isTemperatureRestricted = temperatureRestrictedModels.some(
        (restrictedModel) => model.key.includes(restrictedModel),
      );

      const modelConfig: any = {
        apiKey: openaiApiKey,
        modelName: model.key,
      };

      // Only add temperature if the model supports it
      if (!isTemperatureRestricted) {
        modelConfig.temperature = 0.7;
      }

      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading OpenAI models: ${err}`);
    return {};
  }
};

export const loadOpenAIEmbeddingModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const embeddingModels: Record<string, EmbeddingModel> = {};

    openaiEmbeddingModels.forEach((model) => {
      embeddingModels[model.key] = {
        displayName: model.displayName,
        model: new OpenAIEmbeddings({
          apiKey: openaiApiKey,
          modelName: model.key,
        }) as unknown as Embeddings,
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading OpenAI embeddings models: ${err}`);
    return {};
  }
};
36 src/lib/providers/transformers.ts Normal file
@@ -0,0 +1,36 @@
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';

export const PROVIDER_INFO = {
  key: 'transformers',
  displayName: 'Hugging Face',
};

export const loadTransformersEmbeddingsModels = async () => {
  try {
    const embeddingModels = {
      'xenova-bge-small-en-v1.5': {
        displayName: 'BGE Small',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/bge-small-en-v1.5',
        }),
      },
      'xenova-gte-small': {
        displayName: 'GTE Small',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/gte-small',
        }),
      },
      'xenova-bert-base-multilingual-uncased': {
        displayName: 'Bert Multilingual',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/bert-base-multilingual-uncased',
        }),
      },
    };

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Transformers embeddings model: ${err}`);
    return {};
  }
};
@@ -1,5 +1,5 @@
import axios from 'axios';
import { getSearxngURL } from './config/serverRegistry';
import { getSearxngApiEndpoint } from '../config';

interface SearxngSearchOptions {
  categories?: string[];
@@ -23,7 +23,7 @@ export const searchSearxng = async (
  query: string,
  opts?: SearxngSearchOptions,
) => {
  const searxngURL = getSearxngURL();
  const searxngURL = getSearxngApiEndpoint();

  const url = new URL(`${searxngURL}/search?format=json`);
  url.searchParams.append('q', query);
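A small usage sketch for the searchSearxng helper touched above, which now reads its endpoint from getSearxngApiEndpoint(). The import path and the shape of the returned object are assumptions; only the (query, opts) signature and the categories option are visible in this hunk.

import { searchSearxng } from './searxng'; // hypothetical relative path

const searchNews = async (topic: string) => {
  // `categories` is the only option shown in this hunk; other fields of
  // SearxngSearchOptions are not asserted here.
  const res = await searchSearxng(topic, { categories: ['news'] });
  return res;
};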
@@ -1,7 +0,0 @@
import crypto from 'crypto';

export const hashObj = (obj: { [key: string]: any }) => {
  const json = JSON.stringify(obj, Object.keys(obj).sort());
  const hash = crypto.createHash('sha256').update(json).digest('hex');
  return hash;
};
5 src/lib/types/compute-dot.d.ts vendored Normal file
@@ -0,0 +1,5 @@
declare function computeDot(vectorA: number[], vectorB: number[]): number;

declare module 'compute-dot' {
  export default computeDot;
}
@@ -1,7 +1,17 @@
import dot from 'compute-dot';
import cosineSimilarity from 'compute-cosine-similarity';
import { getSimilarityMeasure } from '../config';

const computeSimilarity = (x: number[], y: number[]): number => {
  return cosineSimilarity(x, y) as number;
  const similarityMeasure = getSimilarityMeasure();

  if (similarityMeasure === 'cosine') {
    return cosineSimilarity(x, y) as number;
  } else if (similarityMeasure === 'dot') {
    return dot(x, y);
  }

  throw new Error('Invalid similarity measure');
};

export default computeSimilarity;
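The updated computeSimilarity dispatches on the configured similarity measure. A standalone illustration (not using the config plumbing above) of how the two measures differ: cosine normalises by vector length, so scaling an embedding changes the dot score but not the cosine score.

const dotProduct = (x: number[], y: number[]) =>
  x.reduce((sum, xi, i) => sum + xi * y[i], 0);

const cosine = (x: number[], y: number[]) =>
  dotProduct(x, y) / (Math.sqrt(dotProduct(x, x)) * Math.sqrt(dotProduct(y, y)));

const a = [1, 2, 3];
const b = [2, 4, 6]; // same direction as `a`, twice the magnitude

console.log(dotProduct(a, b)); // 28
console.log(cosine(a, b)); // 1 (identical direction, length ignored)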
@@ -2,17 +2,17 @@ import type { Config } from 'tailwindcss';
import type { DefaultColors } from 'tailwindcss/types/generated/colors';

const themeDark = (colors: DefaultColors) => ({
  50: '#0d1117',
  100: '#161b22',
  200: '#21262d',
  300: '#30363d',
  50: '#111116',
  100: '#1f202b',
  200: '#2d2f3f',
  300: '#3a3c4c',
});

const themeLight = (colors: DefaultColors) => ({
  50: '#ffffff',
  100: '#f6f8fa',
  200: '#e8edf1',
  300: '#d0d7de',
  100: '#f1f5f9',
  200: '#c4c7c5',
  300: '#9ca3af',
});

const config: Config = {
@@ -49,9 +49,6 @@ const config: Config = {
      },
    },
  },
  plugins: [
    require('@tailwindcss/typography'),
    require('@headlessui/tailwindcss')({ prefix: 'headless' }),
  ],
  plugins: [require('@tailwindcss/typography')],
};
export default config;
158 yarn.lock
@@ -39,13 +39,6 @@
  resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
  integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==

"@cspotcode/source-map-support@^0.8.0":
  version "0.8.1"
  resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
  integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==
  dependencies:
    "@jridgewell/trace-mapping" "0.3.9"

"@dabh/diagnostics@^2.0.2":
  version "2.0.3"
  resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
@@ -414,11 +407,6 @@
    "@react-aria/interactions" "^3.21.3"
    "@tanstack/react-virtual" "^3.8.1"

"@headlessui/tailwindcss@^0.2.2":
  version "0.2.2"
  resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.2.2.tgz#8ebde73fabca72d48636ea56ae790209dc5f0d49"
  integrity sha512-xNe42KjdyA4kfUKLLPGzME9zkH7Q3rOZ5huFihWNWOQFxnItxPB3/67yBI8/qBfY8nwBRx5GHn4VprsoluVMGw==

"@huggingface/jinja@^0.2.2":
  version "0.2.2"
  resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
@@ -587,7 +575,7 @@
    "@jridgewell/sourcemap-codec" "^1.4.10"
    "@jridgewell/trace-mapping" "^0.3.24"

"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0":
"@jridgewell/resolve-uri@^3.1.0":
  version "3.1.2"
  resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
  integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
@@ -602,14 +590,6 @@
  resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
  integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==

"@jridgewell/trace-mapping@0.3.9":
  version "0.3.9"
  resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
  integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==
  dependencies:
    "@jridgewell/resolve-uri" "^3.0.3"
    "@jridgewell/sourcemap-codec" "^1.4.10"

"@jridgewell/trace-mapping@^0.3.24":
  version "0.3.25"
  resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
@@ -681,6 +661,33 @@
    groq-sdk "^0.19.0"
    zod "^3.22.4"

"@langchain/langgraph-checkpoint@^0.1.1":
  version "0.1.1"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph-checkpoint/-/langgraph-checkpoint-0.1.1.tgz#500569a02af4b85172d775de63eeba06afa0c189"
  integrity sha512-h2bP0RUikQZu0Um1ZUPErQLXyhzroJqKRbRcxYRTAh49oNlsfeq4A3K4YEDRbGGuyPZI/Jiqwhks1wZwY73AZw==
  dependencies:
    uuid "^10.0.0"

"@langchain/langgraph-sdk@~0.1.0":
  version "0.1.9"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph-sdk/-/langgraph-sdk-0.1.9.tgz#5442bd1a4257b5d94927af6e09b0aed341ae8a1d"
  integrity sha512-7WEDHtbI3pYPUiiHq+dPaF92ZN2W7lqObdpK0X+roa8zPdHUjve/HiqYuKNWS12u1N+L5QIuQWqZvVNvUA7BfQ==
  dependencies:
    "@types/json-schema" "^7.0.15"
    p-queue "^6.6.2"
    p-retry "4"
    uuid "^9.0.0"

"@langchain/langgraph@^0.4.9":
  version "0.4.9"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.4.9.tgz#470a238ea98662d6ec9dfc42859a00acad00fc81"
  integrity sha512-+rcdTGi4Ium4X/VtIX3Zw4RhxEkYWpwUyz806V6rffjHOAMamg6/WZDxpJbrP33RV/wJG1GH12Z29oX3Pqq3Aw==
  dependencies:
    "@langchain/langgraph-checkpoint" "^0.1.1"
    "@langchain/langgraph-sdk" "~0.1.0"
    uuid "^10.0.0"
    zod "^3.25.32"

"@langchain/ollama@^0.2.3":
  version "0.2.3"
  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-0.2.3.tgz#4868e66db4fc480f08c42fc652274abbab0416f0"
@@ -947,26 +954,6 @@
  resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4"
  integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==

"@tsconfig/node10@^1.0.7":
  version "1.0.11"
  resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2"
  integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==

"@tsconfig/node12@^1.0.7":
  version "1.0.11"
  resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d"
  integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==

"@tsconfig/node14@^1.0.0":
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1"
  integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==

"@tsconfig/node16@^1.0.2":
  version "1.0.4"
  resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9"
  integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==

"@types/better-sqlite3@^7.6.12":
  version "7.6.12"
  resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7"
@@ -979,6 +966,11 @@
  resolved "https://registry.yarnpkg.com/@types/html-to-text/-/html-to-text-9.0.4.tgz#4a83dd8ae8bfa91457d0b1ffc26f4d0537eff58c"
  integrity sha512-pUY3cKH/Nm2yYrEmDlPR1mR7yszjGx4DrwPjQ702C4/D5CwHuZTgZdIdwPkRbcuhs7BAh2L5rg3CL5cbRiGTCQ==

"@types/json-schema@^7.0.15":
  version "7.0.15"
  resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
  integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==

"@types/json5@^0.0.29":
  version "0.0.29"
  resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
@@ -1159,18 +1151,6 @@ acorn-jsx@^5.3.2:
  resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
  integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==

acorn-walk@^8.1.1:
  version "8.3.4"
  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7"
  integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==
  dependencies:
    acorn "^8.11.0"

acorn@^8.11.0, acorn@^8.4.1:
  version "8.15.0"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816"
  integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==

acorn@^8.9.0:
  version "8.11.3"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
@@ -1233,11 +1213,6 @@ anymatch@~3.1.2:
    normalize-path "^3.0.0"
    picomatch "^2.0.4"

arg@^4.1.0:
  version "4.1.3"
  resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
  integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==

arg@^5.0.2:
  version "5.0.2"
  resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
@@ -1796,11 +1771,6 @@ core-util-is@~1.0.0:
  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
  integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==

create-require@^1.1.0:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
  integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==

cross-fetch@^3.1.5:
  version "3.2.0"
  resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3"
@@ -1945,11 +1915,6 @@ didyoumean@^1.2.2:
  resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
  integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==

diff@^4.0.1:
  version "4.0.2"
  resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
  integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==

dingbat-to-unicode@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83"
@@ -2709,15 +2674,6 @@ fraction.js@^4.3.7:
  resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7"
  integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==

framer-motion@^12.23.24:
  version "12.23.24"
  resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-12.23.24.tgz#4895b67e880bd2b1089e61fbaa32ae802fc24b8c"
  integrity sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w==
  dependencies:
    motion-dom "^12.23.23"
    motion-utils "^12.23.6"
    tslib "^2.4.0"

fs-constants@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
@@ -3586,11 +3542,6 @@ lucide-react@^0.363.0:
  resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
  integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==

make-error@^1.1.1:
  version "1.3.6"
  resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
  integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==

mammoth@^1.9.1:
  version "1.9.1"
  resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf"
@@ -3683,18 +3634,6 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
  resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
  integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==

motion-dom@^12.23.23:
  version "12.23.23"
  resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-12.23.23.tgz#8f874333ea1a04ee3a89eb928f518b463d589e0e"
  integrity sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==
  dependencies:
    motion-utils "^12.23.6"

motion-utils@^12.23.6:
  version "12.23.6"
  resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-12.23.6.tgz#fafef80b4ea85122dd0d6c599a0c63d72881f312"
  integrity sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==

ms@2.1.2:
  version "2.1.2"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
@@ -5064,25 +5003,6 @@ ts-interface-checker@^0.1.9:
  resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
  integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==

ts-node@^10.9.2:
  version "10.9.2"
  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f"
  integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==
  dependencies:
    "@cspotcode/source-map-support" "^0.8.0"
    "@tsconfig/node10" "^1.0.7"
    "@tsconfig/node12" "^1.0.7"
    "@tsconfig/node14" "^1.0.0"
    "@tsconfig/node16" "^1.0.2"
    acorn "^8.4.1"
    acorn-walk "^8.1.1"
    arg "^4.1.0"
    create-require "^1.1.0"
    diff "^4.0.1"
    make-error "^1.1.1"
    v8-compile-cache-lib "^3.0.1"
    yn "3.1.1"

tsconfig-paths@^3.15.0:
  version "3.15.0"
  resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4"
@@ -5245,16 +5165,11 @@ uuid@^11.1.0:
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.1.0.tgz#9549028be1753bb934fc96e2bca09bb4105ae912"
  integrity sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==

uuid@^9.0.1:
uuid@^9.0.0, uuid@^9.0.1:
  version "9.0.1"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
  integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==

v8-compile-cache-lib@^3.0.1:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
  integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==

validate.io-array@^1.0.3, validate.io-array@^1.0.5:
  version "1.0.6"
  resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d"
@@ -5472,11 +5387,6 @@ yet-another-react-lightbox@^3.17.2:
  resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c"
  integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ==

yn@3.1.1:
  version "3.1.1"
  resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
  integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==

yocto-queue@^0.1.0:
  version "0.1.0"
  resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"