Compare commits

...

39 Commits

Author SHA1 Message Date
ItzCrazyKns
d97fa708f1 feat(config-route): use new config manager & model registry 2025-10-16 20:42:04 +05:30
ItzCrazyKns
0c7566bb87 feat(sidebar): fix colors on smaller devices 2025-10-16 18:03:40 +05:30
ItzCrazyKns
0ff1be47bf feat(routes): use new model registry 2025-10-16 18:01:25 +05:30
ItzCrazyKns
768578951c feat(chat-route): use new model registry 2025-10-16 17:58:13 +05:30
ItzCrazyKns
9706079ed4 feat(config): add searxngURL 2025-10-16 17:57:30 +05:30
ItzCrazyKns
9219593ee1 feat(model-registry): add loading method 2025-10-16 17:56:57 +05:30
ItzCrazyKns
36fdb6491d feat(model-types): add ModelWithProvider type 2025-10-16 17:56:14 +05:30
ItzCrazyKns
0d2cd4bb1e feat(app): remove compute-dot, make cosine default 2025-10-16 17:53:31 +05:30
ItzCrazyKns
b67ca79e2a feat(config): add searxngURL 2025-10-15 13:02:08 +05:30
ItzCrazyKns
626cb646e2 feat(chat-hook): use new providers endpoint 2025-10-15 12:55:22 +05:30
ItzCrazyKns
410201b117 feat(api/models): rename to providers, use new model registry 2025-10-15 12:54:54 +05:30
ItzCrazyKns
30fb1e312b feat(modelRegistry): add MinimalProvider type 2025-10-15 12:53:36 +05:30
ItzCrazyKns
cc5eea17e4 feat(app): remove old providers & registry 2025-10-15 12:53:05 +05:30
ItzCrazyKns
4ee3173368 feat(config): add setupComplete 2025-10-15 09:04:48 +05:30
ItzCrazyKns
6d61528347 feat(configManager): add update & removal methods 2025-10-14 13:33:14 +05:30
ItzCrazyKns
c02e535f4c feat(openAiProvider): use find instead of filter 2025-10-14 13:05:56 +05:30
ItzCrazyKns
a375de73cc feat(openAiProvider): load models from config as well 2025-10-14 13:05:19 +05:30
ItzCrazyKns
87226957f1 feat(serverRegistry): add server-side config opts 2025-10-14 13:04:26 +05:30
ItzCrazyKns
77743949c7 feat(configManager): fix circular import issues 2025-10-14 13:04:06 +05:30
ItzCrazyKns
64c4514cad feat(models): add id & name fields, move getUIConfigSection to seperate file 2025-10-14 13:03:17 +05:30
ItzCrazyKns
999553877d feat(app): lint & beautify 2025-10-14 10:05:31 +05:30
ItzCrazyKns
e45a9af9ff feat(config): add client side and server side config registries 2025-10-13 22:01:17 +05:30
ItzCrazyKns
e7fbab12ed feat(config): add getConfig method 2025-10-13 21:58:30 +05:30
ItzCrazyKns
387da5dbdd feat(instrumentation): run config migrations 2025-10-11 18:00:31 +05:30
ItzCrazyKns
3003d44544 feat(app): initialize new config management 2025-10-11 18:00:06 +05:30
ItzCrazyKns
f1e6aa9c1a feat(app): add serverUtils, create hashObj util 2025-10-11 17:59:45 +05:30
ItzCrazyKns
f39638fe02 feat(app): initialize new model management 2025-10-11 17:59:27 +05:30
ItzCrazyKns
535c0b9897 feat(app): use instrumentation for migrations 2025-10-11 17:35:27 +05:30
ItzCrazyKns
47350b34ec feat(ui): make ui more reactive 2025-10-08 22:19:58 +05:30
ItzCrazyKns
7c97df98c7 Update perplexica-screenshot.png 2025-10-07 16:45:55 +05:30
ItzCrazyKns
b084c42aca Update perplexica-screenshot.png 2025-10-07 16:21:31 +05:30
ItzCrazyKns
fdfa2f3ea6 Update perplexica-screenshot.png 2025-10-07 16:13:56 +05:30
ItzCrazyKns
3323e7a0ed feat(package): bump version, update screenshot 2025-10-07 16:11:10 +05:30
ItzCrazyKns
d4f9da34c6 feat(tailwind-config): update theme 2025-10-07 15:07:26 +05:30
ItzCrazyKns
10ed67c753 feat(workflow): build images for canary 2025-10-06 20:00:31 +05:30
ItzCrazyKns
cf3cc4e638 feat(migrator): use DATA_DIR env var 2025-10-06 10:13:34 +05:30
ItzCrazyKns
46b9e41100 feat(db-table): add default values for createdAt 2025-10-06 08:59:01 +05:30
sjiampojamarn
02adafbd4b Handling double stringify JSON parsing 2025-10-05 10:46:01 -07:00
ItzCrazyKns
f141d4719c feat(assets): update preview image 2025-10-05 22:32:17 +05:30
47 changed files with 1166 additions and 1763 deletions

Binary image file changed (screenshot): 641 KiB before → 2.1 MiB after (preview not shown).

View File

@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - master
+      - canary
   release:
     types: [published]
@@ -43,6 +44,19 @@ jobs:
            -t itzcrazykns1337/${IMAGE_NAME}:amd64 \
            --push .
+      - name: Build and push AMD64 Canary Docker image
+        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
+        run: |
+          DOCKERFILE=app.dockerfile
+          IMAGE_NAME=perplexica
+          docker buildx build --platform linux/amd64 \
+            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
+            --cache-to=type=inline \
+            --provenance false \
+            -f $DOCKERFILE \
+            -t itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
+            --push .
       - name: Build and push AMD64 release Docker image
         if: github.event_name == 'release'
         run: |
@@ -91,6 +105,19 @@ jobs:
            -t itzcrazykns1337/${IMAGE_NAME}:arm64 \
            --push .
+      - name: Build and push ARM64 Canary Docker image
+        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
+        run: |
+          DOCKERFILE=app.dockerfile
+          IMAGE_NAME=perplexica
+          docker buildx build --platform linux/arm64 \
+            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
+            --cache-to=type=inline \
+            --provenance false \
+            -f $DOCKERFILE \
+            -t itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
+            --push .
       - name: Build and push ARM64 release Docker image
         if: github.event_name == 'release'
         run: |
@@ -128,6 +155,15 @@ jobs:
            --amend itzcrazykns1337/${IMAGE_NAME}:arm64
           docker manifest push itzcrazykns1337/${IMAGE_NAME}:main
+      - name: Create and push multi-arch manifest for canary
+        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
+        run: |
+          IMAGE_NAME=perplexica
+          docker manifest create itzcrazykns1337/${IMAGE_NAME}:canary \
+            --amend itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
+            --amend itzcrazykns1337/${IMAGE_NAME}:canary-arm64
+          docker manifest push itzcrazykns1337/${IMAGE_NAME}:canary
       - name: Create and push multi-arch manifest for releases
         if: github.event_name == 'release'
         run: |

View File

@@ -15,9 +15,6 @@ COPY drizzle ./drizzle
 RUN mkdir -p /home/perplexica/data
 RUN yarn build
-RUN yarn add --dev @vercel/ncc
-RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator
 
 FROM node:24.5.0-slim
 RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
@@ -30,8 +27,6 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
 COPY --from=builder /home/perplexica/.next/standalone ./
 COPY --from=builder /home/perplexica/data ./data
 COPY drizzle ./drizzle
-COPY --from=builder /home/perplexica/migrator/build ./build
-COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
 
 RUN mkdir /home/perplexica/uploads

View File

@@ -1,6 +1,4 @@
 #!/bin/sh
 set -e
-node migrate.js
 exec node server.js

View File

@@ -1,18 +1,18 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.11.0-rc2",
+  "version": "1.11.0-rc3",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
     "dev": "next dev",
-    "build": "npm run db:migrate && next build",
+    "build": "next build",
     "start": "next start",
     "lint": "next lint",
-    "format:write": "prettier . --write",
-    "db:migrate": "node ./src/lib/db/migrate.ts"
+    "format:write": "prettier . --write"
   },
   "dependencies": {
     "@headlessui/react": "^2.2.0",
+    "@headlessui/tailwindcss": "^0.2.2",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
     "@langchain/anthropic": "^0.3.24",
@@ -29,8 +29,8 @@
     "better-sqlite3": "^11.9.1",
     "clsx": "^2.1.0",
     "compute-cosine-similarity": "^1.1.0",
-    "compute-dot": "^1.1.0",
     "drizzle-orm": "^0.40.1",
+    "framer-motion": "^12.23.24",
     "html-to-text": "^9.0.5",
     "jspdf": "^3.0.1",
     "langchain": "^0.3.30",
@@ -65,6 +65,7 @@
     "postcss": "^8",
     "prettier": "^3.2.5",
     "tailwindcss": "^3.3.0",
+    "ts-node": "^10.9.2",
     "typescript": "^5"
   }
 }

View File

@@ -1,23 +1,14 @@
 import crypto from 'crypto';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { EventEmitter } from 'stream';
-import {
-  getAvailableChatModelProviders,
-  getAvailableEmbeddingModelProviders,
-} from '@/lib/providers';
 import db from '@/lib/db';
 import { chats, messages as messagesSchema } from '@/lib/db/schema';
 import { and, eq, gt } from 'drizzle-orm';
 import { getFileDetails } from '@/lib/utils/files';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { ChatOpenAI } from '@langchain/openai';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '@/lib/config';
 import { searchHandlers } from '@/lib/search';
 import { z } from 'zod';
+import ModelRegistry from '@/lib/models/registry';
+import { ModelWithProvider } from '@/lib/models/types';
 
 export const runtime = 'nodejs';
 export const dynamic = 'force-dynamic';
@@ -28,14 +19,30 @@ const messageSchema = z.object({
   content: z.string().min(1, 'Message content is required'),
 });
 
-const chatModelSchema = z.object({
-  provider: z.string().optional(),
-  name: z.string().optional(),
+const chatModelSchema: z.ZodType<ModelWithProvider> = z.object({
+  providerId: z.string({
+    errorMap: () => ({
+      message: 'Chat model provider id must be provided',
+    }),
+  }),
+  key: z.string({
+    errorMap: () => ({
+      message: 'Chat model key must be provided',
+    }),
+  }),
 });
 
-const embeddingModelSchema = z.object({
-  provider: z.string().optional(),
-  name: z.string().optional(),
+const embeddingModelSchema: z.ZodType<ModelWithProvider> = z.object({
+  providerId: z.string({
+    errorMap: () => ({
+      message: 'Embedding model provider id must be provided',
+    }),
+  }),
+  key: z.string({
+    errorMap: () => ({
+      message: 'Embedding model key must be provided',
+    }),
+  }),
 });
 
 const bodySchema = z.object({
@@ -57,8 +64,8 @@ const bodySchema = z.object({
     .optional()
     .default([]),
   files: z.array(z.string()).optional().default([]),
-  chatModel: chatModelSchema.optional().default({}),
-  embeddingModel: embeddingModelSchema.optional().default({}),
+  chatModel: chatModelSchema,
+  embeddingModel: embeddingModelSchema,
   systemInstructions: z.string().nullable().optional().default(''),
 });
@@ -248,56 +255,16 @@ export const POST = async (req: Request) => {
       );
     }
 
-    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
-      getAvailableChatModelProviders(),
-      getAvailableEmbeddingModelProviders(),
+    const registry = new ModelRegistry();
+
+    const [llm, embedding] = await Promise.all([
+      registry.loadChatModel(body.chatModel.providerId, body.chatModel.key),
+      registry.loadEmbeddingModel(
+        body.embeddingModel.providerId,
+        body.embeddingModel.key,
+      ),
     ]);
 
-    const chatModelProvider =
-      chatModelProviders[
-        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
-      ];
-    const chatModel =
-      chatModelProvider[
-        body.chatModel?.name || Object.keys(chatModelProvider)[0]
-      ];
-
-    const embeddingProvider =
-      embeddingModelProviders[
-        body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0]
-      ];
-    const embeddingModel =
-      embeddingProvider[
-        body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
-      ];
-
-    let llm: BaseChatModel | undefined;
-    let embedding = embeddingModel.model;
-
-    if (body.chatModel?.provider === 'custom_openai') {
-      llm = new ChatOpenAI({
-        apiKey: getCustomOpenaiApiKey(),
-        modelName: getCustomOpenaiModelName(),
-        temperature: 0.7,
-        configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
-        },
-      }) as unknown as BaseChatModel;
-    } else if (chatModelProvider && chatModel) {
-      llm = chatModel.model;
-    }
-
-    if (!llm) {
-      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
-    }
-
-    if (!embedding) {
-      return Response.json(
-        { error: 'Invalid embedding model' },
-        { status: 400 },
-      );
-    }
-
     const humanMessageId =
       message.messageId ?? crypto.randomBytes(7).toString('hex');
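Note: with the new schema, the chat endpoint no longer accepts provider/name strings; both model fields must carry a configured provider's id plus a model key. A minimal sketch of just those two fields (not the full request payload), assuming the provider id is the UUID returned by the providers endpoint; the id and keys below are illustrative only:

import { ModelWithProvider } from '@/lib/models/types';

// Hypothetical values: providerId comes from /api/providers, key from that provider's model list.
const chatModel: ModelWithProvider = {
  providerId: '<provider-uuid>',
  key: 'gpt-4o-mini',
};

const embeddingModel: ModelWithProvider = {
  providerId: '<provider-uuid>',
  key: 'text-embedding-3-small',
};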

View File

@@ -1,134 +1,33 @@
-import {
-  getAnthropicApiKey,
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-  getGeminiApiKey,
-  getGroqApiKey,
-  getOllamaApiEndpoint,
-  getOpenaiApiKey,
-  getDeepseekApiKey,
-  getAimlApiKey,
-  getLMStudioApiEndpoint,
-  getLemonadeApiEndpoint,
-  getLemonadeApiKey,
-  updateConfig,
-  getOllamaApiKey,
-} from '@/lib/config';
-import {
-  getAvailableChatModelProviders,
-  getAvailableEmbeddingModelProviders,
-} from '@/lib/providers';
+import configManager from '@/lib/config';
+import ModelRegistry from '@/lib/models/registry';
+import { NextRequest, NextResponse } from 'next/server';
 
-export const GET = async (req: Request) => {
+export const GET = async (req: NextRequest) => {
   try {
-    const config: Record<string, any> = {};
+    const values = configManager.currentConfig;
+    const fields = configManager.getUIConfigSections();
 
-    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
-      getAvailableChatModelProviders(),
-      getAvailableEmbeddingModelProviders(),
-    ]);
+    const modelRegistry = new ModelRegistry();
+    const modelProviders = await modelRegistry.getActiveProviders();
 
-    config['chatModelProviders'] = {};
-    config['embeddingModelProviders'] = {};
-
-    for (const provider in chatModelProviders) {
-      config['chatModelProviders'][provider] = Object.keys(
-        chatModelProviders[provider],
-      ).map((model) => {
-        return {
-          name: model,
-          displayName: chatModelProviders[provider][model].displayName,
-        };
-      });
-    }
-
-    for (const provider in embeddingModelProviders) {
-      config['embeddingModelProviders'][provider] = Object.keys(
-        embeddingModelProviders[provider],
-      ).map((model) => {
-        return {
-          name: model,
-          displayName: embeddingModelProviders[provider][model].displayName,
-        };
-      });
-    }
-
-    config['openaiApiKey'] = getOpenaiApiKey();
-    config['ollamaApiUrl'] = getOllamaApiEndpoint();
-    config['ollamaApiKey'] = getOllamaApiKey();
-    config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
-    config['lemonadeApiUrl'] = getLemonadeApiEndpoint();
-    config['lemonadeApiKey'] = getLemonadeApiKey();
-    config['anthropicApiKey'] = getAnthropicApiKey();
-    config['groqApiKey'] = getGroqApiKey();
-    config['geminiApiKey'] = getGeminiApiKey();
-    config['deepseekApiKey'] = getDeepseekApiKey();
-    config['aimlApiKey'] = getAimlApiKey();
-    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
-    config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
-    config['customOpenaiModelName'] = getCustomOpenaiModelName();
-
-    return Response.json({ ...config }, { status: 200 });
+    values.modelProviders = values.modelProviders.map((mp) => {
+      const activeProvider = modelProviders.find((p) => p.id === mp.id)
+      return {
+        ...mp,
+        chatModels: activeProvider?.chatModels ?? mp.chatModels,
+        embeddingModels: activeProvider?.embeddingModels ?? mp.embeddingModels
+      }
+    })
+
+    return NextResponse.json({
+      values,
+      fields,
+    })
   } catch (err) {
-    console.error('An error occurred while getting config:', err);
+    console.error('Error in getting config: ', err);
     return Response.json(
-      { message: 'An error occurred while getting config' },
+      { message: 'An error has occurred.' },
       { status: 500 },
     );
   }
 };
-
-export const POST = async (req: Request) => {
-  try {
-    const config = await req.json();
-
-    const updatedConfig = {
-      MODELS: {
-        OPENAI: {
-          API_KEY: config.openaiApiKey,
-        },
-        GROQ: {
-          API_KEY: config.groqApiKey,
-        },
-        ANTHROPIC: {
-          API_KEY: config.anthropicApiKey,
-        },
-        GEMINI: {
-          API_KEY: config.geminiApiKey,
-        },
-        OLLAMA: {
-          API_URL: config.ollamaApiUrl,
-          API_KEY: config.ollamaApiKey,
-        },
-        DEEPSEEK: {
-          API_KEY: config.deepseekApiKey,
-        },
-        AIMLAPI: {
-          API_KEY: config.aimlApiKey,
-        },
-        LM_STUDIO: {
-          API_URL: config.lmStudioApiUrl,
-        },
-        LEMONADE: {
-          API_URL: config.lemonadeApiUrl,
-          API_KEY: config.lemonadeApiKey,
-        },
-        CUSTOM_OPENAI: {
-          API_URL: config.customOpenaiApiUrl,
-          API_KEY: config.customOpenaiApiKey,
-          MODEL_NAME: config.customOpenaiModelName,
-        },
-      },
-    };
-
-    updateConfig(updatedConfig);
-
-    return Response.json({ message: 'Config updated' }, { status: 200 });
-  } catch (err) {
-    console.error('An error occurred while updating config:', err);
-    return Response.json(
-      { message: 'An error occurred while updating config' },
-      { status: 500 },
-    );
-  }
-};

View File

@@ -1,23 +1,12 @@
 import handleImageSearch from '@/lib/chains/imageSearchAgent';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '@/lib/config';
-import { getAvailableChatModelProviders } from '@/lib/providers';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import ModelRegistry from '@/lib/models/registry';
+import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
-
-interface ChatModel {
-  provider: string;
-  model: string;
-}
 
 interface ImageSearchBody {
   query: string;
   chatHistory: any[];
-  chatModel?: ChatModel;
+  chatModel: ModelWithProvider;
 }
 
 export const POST = async (req: Request) => {
@@ -34,35 +23,12 @@ export const POST = async (req: Request) => {
     })
     .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const chatModelProviders = await getAvailableChatModelProviders();
-
-    const chatModelProvider =
-      chatModelProviders[
-        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
-      ];
-    const chatModel =
-      chatModelProvider[
-        body.chatModel?.model || Object.keys(chatModelProvider)[0]
-      ];
-
-    let llm: BaseChatModel | undefined;
-
-    if (body.chatModel?.provider === 'custom_openai') {
-      llm = new ChatOpenAI({
-        apiKey: getCustomOpenaiApiKey(),
-        modelName: getCustomOpenaiModelName(),
-        temperature: 0.7,
-        configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
-        },
-      }) as unknown as BaseChatModel;
-    } else if (chatModelProvider && chatModel) {
-      llm = chatModel.model;
-    }
-
-    if (!llm) {
-      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
-    }
+    const registry = new ModelRegistry();
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
 
     const images = await handleImageSearch(
       {

View File

@@ -1,47 +0,0 @@
import {
getAvailableChatModelProviders,
getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
export const GET = async (req: Request) => {
try {
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
getAvailableChatModelProviders(),
getAvailableEmbeddingModelProviders(),
]);
Object.keys(chatModelProviders).forEach((provider) => {
Object.keys(chatModelProviders[provider]).forEach((model) => {
delete (chatModelProviders[provider][model] as { model?: unknown })
.model;
});
});
Object.keys(embeddingModelProviders).forEach((provider) => {
Object.keys(embeddingModelProviders[provider]).forEach((model) => {
delete (embeddingModelProviders[provider][model] as { model?: unknown })
.model;
});
});
return Response.json(
{
chatModelProviders,
embeddingModelProviders,
},
{
status: 200,
},
);
} catch (err) {
console.error('An error occurred while fetching models', err);
return Response.json(
{
message: 'An error has occurred.',
},
{
status: 500,
},
);
}
};

View File

@@ -0,0 +1,28 @@
import ModelRegistry from '@/lib/models/registry';
export const GET = async (req: Request) => {
try {
const registry = new ModelRegistry();
const activeProviders = await registry.getActiveProviders();
return Response.json(
{
providers: activeProviders,
},
{
status: 200,
},
);
} catch (err) {
console.error('An error occurred while fetching providers', err);
return Response.json(
{
message: 'An error has occurred.',
},
{
status: 500,
},
);
}
};
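Note: a minimal client-side sketch of consuming this new endpoint. The response shape ({ providers: MinimalProvider[] }) is taken from the route above and from how the reworked chat hook reads it; the helper name and error handling here are illustrative only.

import { MinimalProvider } from '@/lib/models/types';

// Fetches the active providers; each entry carries { id, name, chatModels, embeddingModels }.
const listProviders = async (): Promise<MinimalProvider[]> => {
  const res = await fetch('/api/providers', {
    headers: { 'Content-Type': 'application/json' },
  });
  if (!res.ok) {
    throw new Error(`Provider fetching failed with status code ${res.status}`);
  }
  const data = await res.json();
  return data.providers;
};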

View File

@@ -1,22 +1,12 @@
 import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '@/lib/config';
-import { getAvailableChatModelProviders } from '@/lib/providers';
+import ModelRegistry from '@/lib/models/registry';
+import { ModelWithProvider } from '@/lib/models/types';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
-
-interface ChatModel {
-  provider: string;
-  model: string;
-}
 
 interface SuggestionsGenerationBody {
   chatHistory: any[];
-  chatModel?: ChatModel;
+  chatModel: ModelWithProvider;
 }
 
 export const POST = async (req: Request) => {
@@ -33,35 +23,12 @@ export const POST = async (req: Request) => {
     })
     .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const chatModelProviders = await getAvailableChatModelProviders();
-
-    const chatModelProvider =
-      chatModelProviders[
-        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
-      ];
-    const chatModel =
-      chatModelProvider[
-        body.chatModel?.model || Object.keys(chatModelProvider)[0]
-      ];
-
-    let llm: BaseChatModel | undefined;
-
-    if (body.chatModel?.provider === 'custom_openai') {
-      llm = new ChatOpenAI({
-        apiKey: getCustomOpenaiApiKey(),
-        modelName: getCustomOpenaiModelName(),
-        temperature: 0.7,
-        configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
-        },
-      }) as unknown as BaseChatModel;
-    } else if (chatModelProvider && chatModel) {
-      llm = chatModel.model;
-    }
-
-    if (!llm) {
-      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
-    }
+    const registry = new ModelRegistry();
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
 
     const suggestions = await generateSuggestions(
       {

View File

@@ -1,23 +1,12 @@
 import handleVideoSearch from '@/lib/chains/videoSearchAgent';
-import {
-  getCustomOpenaiApiKey,
-  getCustomOpenaiApiUrl,
-  getCustomOpenaiModelName,
-} from '@/lib/config';
-import { getAvailableChatModelProviders } from '@/lib/providers';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import ModelRegistry from '@/lib/models/registry';
+import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
-
-interface ChatModel {
-  provider: string;
-  model: string;
-}
 
 interface VideoSearchBody {
   query: string;
   chatHistory: any[];
-  chatModel?: ChatModel;
+  chatModel: ModelWithProvider;
 }
 
 export const POST = async (req: Request) => {
@@ -34,35 +23,12 @@ export const POST = async (req: Request) => {
     })
     .filter((msg) => msg !== undefined) as BaseMessage[];
 
-    const chatModelProviders = await getAvailableChatModelProviders();
-
-    const chatModelProvider =
-      chatModelProviders[
-        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
-      ];
-    const chatModel =
-      chatModelProvider[
-        body.chatModel?.model || Object.keys(chatModelProvider)[0]
-      ];
-
-    let llm: BaseChatModel | undefined;
-
-    if (body.chatModel?.provider === 'custom_openai') {
-      llm = new ChatOpenAI({
-        apiKey: getCustomOpenaiApiKey(),
-        modelName: getCustomOpenaiModelName(),
-        temperature: 0.7,
-        configuration: {
-          baseURL: getCustomOpenaiApiUrl(),
-        },
-      }) as unknown as BaseChatModel;
-    } else if (chatModelProvider && chatModel) {
-      llm = chatModel.model;
-    }
-
-    if (!llm) {
-      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
-    }
+    const registry = new ModelRegistry();
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
 
     const videos = await handleVideoSearch(
       {

View File

@@ -63,7 +63,7 @@ const Focus = () => {
     <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
       <PopoverButton
         type="button"
-        className=" text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
+        className="active:border-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
       >
         {focusMode !== 'webSearch' ? (
           <div className="flex flex-row items-center space-x-1">

View File

@@ -33,11 +33,10 @@ const SearchImages = ({
         onClick={async () => {
           setLoading(true);
 
-          const chatModelProvider = localStorage.getItem('chatModelProvider');
-          const chatModel = localStorage.getItem('chatModel');
-
-          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
-          const customOpenAIKey = localStorage.getItem('openAIApiKey');
+          const chatModelProvider = localStorage.getItem(
+            'chatModelProviderId',
+          );
+          const chatModel = localStorage.getItem('chatModelKey');
 
           const res = await fetch(`/api/images`, {
             method: 'POST',
@@ -48,12 +47,8 @@ const SearchImages = ({
               query: query,
               chatHistory: chatHistory,
               chatModel: {
-                provider: chatModelProvider,
-                model: chatModel,
-                ...(chatModelProvider === 'custom_openai' && {
-                  customOpenAIBaseURL: customOpenAIBaseURL,
-                  customOpenAIKey: customOpenAIKey,
-                }),
+                providerId: chatModelProvider,
+                key: chatModel,
               },
             }),
           });

View File

@@ -48,11 +48,10 @@ const Searchvideos = ({
         onClick={async () => {
           setLoading(true);
 
-          const chatModelProvider = localStorage.getItem('chatModelProvider');
-          const chatModel = localStorage.getItem('chatModel');
-
-          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
-          const customOpenAIKey = localStorage.getItem('openAIApiKey');
+          const chatModelProvider = localStorage.getItem(
+            'chatModelProviderId',
+          );
+          const chatModel = localStorage.getItem('chatModelKey');
 
           const res = await fetch(`/api/videos`, {
             method: 'POST',
@@ -63,12 +62,8 @@ const Searchvideos = ({
               query: query,
               chatHistory: chatHistory,
              chatModel: {
-                provider: chatModelProvider,
-                model: chatModel,
-                ...(chatModelProvider === 'custom_openai' && {
-                  customOpenAIBaseURL: customOpenAIBaseURL,
-                  customOpenAIKey: customOpenAIKey,
-                }),
+                providerId: chatModelProvider,
+                key: chatModel,
              },
            }),
          });

View File

@@ -70,7 +70,7 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
         </div>
       </div>
 
-      <div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-primary dark:bg-dark-primary px-4 py-4 shadow-sm lg:hidden">
+      <div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-secondary dark:bg-dark-secondary px-4 py-4 shadow-sm lg:hidden">
         {navLinks.map((link, i) => (
           <Link
             href={link.href}

src/instrumentation.ts (new file, 13 lines)

@@ -0,0 +1,13 @@
export const register = async () => {
if (process.env.NEXT_RUNTIME === 'nodejs') {
try {
console.log('Running database migrations...');
await import('./lib/db/migrate');
console.log('Database migrations completed successfully');
} catch (error) {
console.error('Failed to run database migrations:', error);
}
await import('./lib/config/index');
}
};

View File

@@ -1,11 +1,8 @@
 import { Message } from '@/components/ChatWindow';
 
 export const getSuggestions = async (chatHistory: Message[]) => {
-  const chatModel = localStorage.getItem('chatModel');
-  const chatModelProvider = localStorage.getItem('chatModelProvider');
-
-  const customOpenAIKey = localStorage.getItem('openAIApiKey');
-  const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+  const chatModel = localStorage.getItem('chatModelKey');
+  const chatModelProvider = localStorage.getItem('chatModelProviderId');
 
   const res = await fetch(`/api/suggestions`, {
     method: 'POST',
@@ -15,12 +12,8 @@ export const getSuggestions = async (chatHistory: Message[]) => {
     body: JSON.stringify({
       chatHistory: chatHistory,
       chatModel: {
-        provider: chatModelProvider,
-        model: chatModel,
-        ...(chatModelProvider === 'custom_openai' && {
-          customOpenAIKey,
-          customOpenAIBaseURL,
-        }),
+        providerId: chatModelProvider,
+        key: chatModel,
       },
     }),
   });

View File

@@ -1,158 +0,0 @@
import toml from '@iarna/toml';
// Use dynamic imports for Node.js modules to prevent client-side errors
let fs: any;
let path: any;
if (typeof window === 'undefined') {
// We're on the server
fs = require('fs');
path = require('path');
}
const configFileName = 'config.toml';
interface Config {
GENERAL: {
SIMILARITY_MEASURE: string;
KEEP_ALIVE: string;
};
MODELS: {
OPENAI: {
API_KEY: string;
};
GROQ: {
API_KEY: string;
};
ANTHROPIC: {
API_KEY: string;
};
GEMINI: {
API_KEY: string;
};
OLLAMA: {
API_URL: string;
API_KEY: string;
};
DEEPSEEK: {
API_KEY: string;
};
AIMLAPI: {
API_KEY: string;
};
LM_STUDIO: {
API_URL: string;
};
LEMONADE: {
API_URL: string;
API_KEY: string;
};
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
MODEL_NAME: string;
};
};
API_ENDPOINTS: {
SEARXNG: string;
};
}
type RecursivePartial<T> = {
[P in keyof T]?: RecursivePartial<T[P]>;
};
const loadConfig = () => {
// Server-side only
if (typeof window === 'undefined') {
return toml.parse(
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
) as any as Config;
}
// Client-side fallback - settings will be loaded via API
return {} as Config;
};
export const getSimilarityMeasure = () =>
loadConfig().GENERAL.SIMILARITY_MEASURE;
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
export const getSearxngApiEndpoint = () =>
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
export const getCustomOpenaiApiUrl = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
export const getCustomOpenaiModelName = () =>
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
export const getLMStudioApiEndpoint = () =>
loadConfig().MODELS.LM_STUDIO.API_URL;
export const getLemonadeApiEndpoint = () =>
loadConfig().MODELS.LEMONADE.API_URL;
export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY;
const mergeConfigs = (current: any, update: any): any => {
if (update === null || update === undefined) {
return current;
}
if (typeof current !== 'object' || current === null) {
return update;
}
const result = { ...current };
for (const key in update) {
if (Object.prototype.hasOwnProperty.call(update, key)) {
const updateValue = update[key];
if (
typeof updateValue === 'object' &&
updateValue !== null &&
typeof result[key] === 'object' &&
result[key] !== null
) {
result[key] = mergeConfigs(result[key], updateValue);
} else if (updateValue !== undefined) {
result[key] = updateValue;
}
}
}
return result;
};
export const updateConfig = (config: RecursivePartial<Config>) => {
// Server-side only
if (typeof window === 'undefined') {
const currentConfig = loadConfig();
const mergedConfig = mergeConfigs(currentConfig, config);
fs.writeFileSync(
path.join(path.join(process.cwd(), `${configFileName}`)),
toml.stringify(mergedConfig),
);
}
};

View File

@@ -0,0 +1,16 @@
'use client';
const getClientConfig = (key: string, defaultVal?: any) => {
return localStorage.getItem(key) ?? defaultVal ?? undefined;
};
export const getTheme = () => getClientConfig('theme', 'dark');
export const getAutoImageSearch = () =>
Boolean(getClientConfig('autoImageSearch', 'true'));
export const getAutoVideoSearch = () =>
Boolean(getClientConfig('autoVideoSearch', 'true'));
export const getSystemInstructions = () =>
getClientConfig('systemInstructions', '');

src/lib/config/index.ts (new file, 242 lines)

@@ -0,0 +1,242 @@
import path from 'node:path';
import fs from 'fs';
import { Config, ConfigModelProvider, UIConfigSections } from './types';
import { hashObj } from '../serverUtils';
import { getModelProvidersUIConfigSection } from '../models/providers';
class ConfigManager {
configPath: string = path.join(
process.env.DATA_DIR || process.cwd(),
'/data/config.json',
);
configVersion = 1;
currentConfig: Config = {
version: this.configVersion,
setupComplete: false,
general: {},
modelProviders: [],
search: {
searxngURL: '',
},
};
uiConfigSections: UIConfigSections = {
general: [],
modelProviders: [],
search: [
{
name: 'SearXNG URL',
key: 'searxngURL',
type: 'string',
required: false,
description: 'The URL of your SearXNG instance',
placeholder: 'http://localhost:4000',
default: '',
scope: 'server',
env: 'SEARXNG_API_URL',
},
],
};
constructor() {
this.initialize();
}
private initialize() {
this.initializeConfig();
this.initializeFromEnv();
}
private saveConfig() {
fs.writeFileSync(
this.configPath,
JSON.stringify(this.currentConfig, null, 2),
);
}
private initializeConfig() {
const exists = fs.existsSync(this.configPath);
if (!exists) {
fs.writeFileSync(
this.configPath,
JSON.stringify(this.currentConfig, null, 2),
);
} else {
try {
this.currentConfig = JSON.parse(
fs.readFileSync(this.configPath, 'utf-8'),
);
} catch (err) {
if (err instanceof SyntaxError) {
console.error(
`Error parsing config file at ${this.configPath}:`,
err,
);
console.log(
'Loading default config and overwriting the existing file.',
);
fs.writeFileSync(
this.configPath,
JSON.stringify(this.currentConfig, null, 2),
);
return;
} else {
console.log('Unknown error reading config file:', err);
}
}
this.currentConfig = this.migrateConfig(this.currentConfig);
}
}
private migrateConfig(config: Config): Config {
/* TODO: Add migrations */
return config;
}
private initializeFromEnv() {
/* providers section*/
const providerConfigSections = getModelProvidersUIConfigSection();
this.uiConfigSections.modelProviders = providerConfigSections;
const newProviders: ConfigModelProvider[] = [];
providerConfigSections.forEach((provider) => {
const newProvider: ConfigModelProvider & { required?: string[] } = {
id: crypto.randomUUID(),
name: `${provider.name} ${Math.floor(Math.random() * 1000)}`,
type: provider.key,
chatModels: [],
embeddingModels: [],
config: {},
required: [],
hash: '',
};
provider.fields.forEach((field) => {
newProvider.config[field.key] =
process.env[field.env!] ||
field.default ||
''; /* Env var must exist for providers */
if (field.required) newProvider.required?.push(field.key);
});
let configured = true;
newProvider.required?.forEach((r) => {
if (!newProvider.config[r]) {
configured = false;
}
});
if (configured) {
const hash = hashObj(newProvider.config);
newProvider.hash = hash;
delete newProvider.required;
const exists = this.currentConfig.modelProviders.find(
(p) => p.hash === hash,
);
if (!exists) {
newProviders.push(newProvider);
}
}
});
this.currentConfig.modelProviders.push(...newProviders);
/* search section */
this.uiConfigSections.search.forEach((f) => {
if (f.env && !this.currentConfig.search[f.key]) {
this.currentConfig.search[f.key] =
process.env[f.env] ?? f.default ?? '';
}
});
this.saveConfig();
}
public getConfig(key: string, defaultValue?: any): any {
const nested = key.split('.');
let obj: any = this.currentConfig;
for (let i = 0; i < nested.length; i++) {
const part = nested[i];
if (obj == null) return defaultValue;
obj = obj[part];
}
return obj === undefined ? defaultValue : obj;
}
public updateConfig(key: string, val: any) {
const parts = key.split('.');
if (parts.length === 0) return;
let target: any = this.currentConfig;
for (let i = 0; i < parts.length - 1; i++) {
const part = parts[i];
if (target[part] === null || typeof target[part] !== 'object') {
target[part] = {};
}
target = target[part];
}
const finalKey = parts[parts.length - 1];
target[finalKey] = val;
this.saveConfig();
}
public addModelProvider(type: string, name: string, config: any) {
const newModelProvider: ConfigModelProvider = {
id: crypto.randomUUID(),
name,
type,
config,
chatModels: [],
embeddingModels: [],
hash: hashObj(config),
};
this.currentConfig.modelProviders.push(newModelProvider);
this.saveConfig();
}
public removeModelProvider(id: string) {
const index = this.currentConfig.modelProviders.findIndex(
(p) => p.id === id,
);
if (index === -1) return;
this.currentConfig.modelProviders =
this.currentConfig.modelProviders.filter((p) => p.id !== id);
this.saveConfig();
}
public isSetupComplete() {
return this.currentConfig.setupComplete;
}
public markSetupComplete() {
if (!this.currentConfig.setupComplete) {
this.currentConfig.setupComplete = true;
}
this.saveConfig();
}
public getUIConfigSections(): UIConfigSections {
return this.uiConfigSections;
}
}
const configManager = new ConfigManager();
export default configManager;
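Note: a minimal sketch of reading and writing values through the new ConfigManager singleton. Dotted keys walk the nested config object; getConfig, updateConfig, and the 'search.searxngURL' key are taken from the class above, while the example URL is illustrative only.

import configManager from '@/lib/config';

// Read a nested value with a fallback default.
const searxngURL = configManager.getConfig('search.searxngURL', '');

// Write a nested value; this persists data/config.json via saveConfig().
configManager.updateConfig('search.searxngURL', 'http://localhost:4000');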

View File

@@ -0,0 +1,14 @@
import configManager from './index';
import { ConfigModelProvider } from './types';
export const getConfiguredModelProviders = (): ConfigModelProvider[] => {
return configManager.getConfig('modelProviders', []);
};
export const getConfiguredModelProviderById = (
id: string,
): ConfigModelProvider | undefined => {
return getConfiguredModelProviders().find((p) => p.id === id) ?? undefined;
};
export const getSearxngURL = () => configManager.getConfig('search.searxngURL', '')

src/lib/config/types.ts (new file, 89 lines)

@@ -0,0 +1,89 @@
import { Model } from '../models/types';
type BaseUIConfigField = {
name: string;
key: string;
required: boolean;
description: string;
scope: 'client' | 'server';
env?: string;
};
type StringUIConfigField = BaseUIConfigField & {
type: 'string';
placeholder?: string;
default?: string;
};
type SelectUIConfigFieldOptions = {
name: string;
key: string;
value: string;
};
type SelectUIConfigField = BaseUIConfigField & {
type: 'select';
default?: string;
options: SelectUIConfigFieldOptions[];
};
type PasswordUIConfigField = BaseUIConfigField & {
type: 'password';
placeholder?: string;
default?: string;
};
type UIConfigField =
| StringUIConfigField
| SelectUIConfigField
| PasswordUIConfigField;
type ConfigModelProvider = {
id: string;
name: string;
type: string;
chatModels: Model[];
embeddingModels: Model[];
config: { [key: string]: any };
hash: string;
};
type Config = {
version: number;
setupComplete: boolean;
general: {
[key: string]: any;
};
modelProviders: ConfigModelProvider[];
search: {
[key: string]: any
}
};
type EnvMap = {
[key: string]: {
fieldKey: string;
providerKey: string;
};
};
type ModelProviderUISection = {
name: string;
key: string;
fields: UIConfigField[];
};
type UIConfigSections = {
general: UIConfigField[];
modelProviders: ModelProviderUISection[];
search: UIConfigField[];
};
export type {
UIConfigField,
Config,
EnvMap,
UIConfigSections,
ModelProviderUISection,
ConfigModelProvider,
};

View File

@@ -2,9 +2,12 @@ import Database from 'better-sqlite3';
 import path from 'path';
 import fs from 'fs';
 
-const db = new Database(path.join(process.cwd(), 'data', 'db.sqlite'));
-
-const migrationsFolder = path.join(process.cwd(), 'drizzle');
+const DATA_DIR = process.env.DATA_DIR || process.cwd();
+const dbPath = path.join(DATA_DIR, './data/db.sqlite');
+
+const db = new Database(dbPath);
+
+const migrationsFolder = path.join(DATA_DIR, 'drizzle');
 
 db.exec(`
   CREATE TABLE IF NOT EXISTS ran_migrations (
@@ -54,7 +57,7 @@ fs.readdirSync(migrationsFolder)
         id INTEGER PRIMARY KEY,
         type TEXT NOT NULL,
         chatId TEXT NOT NULL,
-        createdAt TEXT NOT NULL,
+        createdAt TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
         messageId TEXT NOT NULL,
         content TEXT,
         sources TEXT DEFAULT '[]'
@@ -67,8 +70,10 @@ fs.readdirSync(migrationsFolder)
     `);
 
     messages.forEach((msg: any) => {
-      if (msg.type === 'user') {
+      while (typeof msg.metadata === 'string') {
         msg.metadata = JSON.parse(msg.metadata || '{}');
+      }
+
+      if (msg.type === 'user') {
        insertMessage.run(
          'user',
          msg.chatId,
@@ -78,7 +83,6 @@ fs.readdirSync(migrationsFolder)
           '[]',
         );
       } else if (msg.type === 'assistant') {
-        msg.metadata = JSON.parse(msg.metadata || '{}');
         insertMessage.run(
           'assistant',
           msg.chatId,
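Note: the loop above exists because some stored rows carry metadata that was stringified twice, so a single JSON.parse still yields a string. A minimal illustration of why unwrapping in a loop handles both cases (the sample object is illustrative only):

const original = { sources: [] };
const doubleEncoded = JSON.stringify(JSON.stringify(original)); // '"{\"sources\":[]}"'

let metadata: any = doubleEncoded;
while (typeof metadata === 'string') {
  // First pass yields the inner JSON string, second pass yields the object.
  metadata = JSON.parse(metadata || '{}');
}
// metadata is now { sources: [] }, regardless of how many times it was stringified.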

View File

@@ -20,6 +20,7 @@ import crypto from 'crypto';
 import { useSearchParams } from 'next/navigation';
 import { toast } from 'sonner';
 import { getSuggestions } from '../actions';
+import { MinimalProvider } from '../models/types';
 
 export type Section = {
   userMessage: UserMessage;
@@ -66,13 +67,13 @@ export interface File {
 }
 
 interface ChatModelProvider {
-  name: string;
-  provider: string;
+  key: string;
+  providerId: string;
 }
 
 interface EmbeddingModelProvider {
-  name: string;
-  provider: string;
+  key: string;
+  providerId: string;
 }
 
 const checkConfig = async (
@@ -82,10 +83,12 @@ const checkConfig = async (
   setHasError: (hasError: boolean) => void,
 ) => {
   try {
-    let chatModel = localStorage.getItem('chatModel');
-    let chatModelProvider = localStorage.getItem('chatModelProvider');
-    let embeddingModel = localStorage.getItem('embeddingModel');
-    let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
+    let chatModelKey = localStorage.getItem('chatModelKey');
+    let chatModelProviderId = localStorage.getItem('chatModelProviderId');
+    let embeddingModelKey = localStorage.getItem('embeddingModelKey');
+    let embeddingModelProviderId = localStorage.getItem(
+      'embeddingModelProviderId',
+    );
 
     const autoImageSearch = localStorage.getItem('autoImageSearch');
     const autoVideoSearch = localStorage.getItem('autoVideoSearch');
@@ -98,145 +101,81 @@ const checkConfig = async (
       localStorage.setItem('autoVideoSearch', 'false');
     }
 
-    const providers = await fetch(`/api/models`, {
+    const res = await fetch(`/api/providers`, {
       headers: {
         'Content-Type': 'application/json',
       },
-    }).then(async (res) => {
-      if (!res.ok)
-        throw new Error(
-          `Failed to fetch models: ${res.status} ${res.statusText}`,
-        );
-      return res.json();
     });
 
-    if (
-      !chatModel ||
-      !chatModelProvider ||
-      !embeddingModel ||
-      !embeddingModelProvider
-    ) {
-      if (!chatModel || !chatModelProvider) {
-        const chatModelProviders = providers.chatModelProviders;
-        const chatModelProvidersKeys = Object.keys(chatModelProviders);
-
-        if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
-          return toast.error('No chat models available');
-        } else {
-          chatModelProvider =
-            chatModelProvidersKeys.find(
-              (provider) =>
-                Object.keys(chatModelProviders[provider]).length > 0,
-            ) || chatModelProvidersKeys[0];
-        }
-
-        if (
-          chatModelProvider === 'custom_openai' &&
-          Object.keys(chatModelProviders[chatModelProvider]).length === 0
-        ) {
-          toast.error(
-            "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
-          );
-          return setHasError(true);
-        }
-
-        chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-      }
-
-      if (!embeddingModel || !embeddingModelProvider) {
-        const embeddingModelProviders = providers.embeddingModelProviders;
-
-        if (
-          !embeddingModelProviders ||
-          Object.keys(embeddingModelProviders).length === 0
-        )
-          return toast.error('No embedding models available');
-
-        embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
-        embeddingModel = Object.keys(
-          embeddingModelProviders[embeddingModelProvider],
-        )[0];
-      }
-
-      localStorage.setItem('chatModel', chatModel!);
-      localStorage.setItem('chatModelProvider', chatModelProvider);
-      localStorage.setItem('embeddingModel', embeddingModel!);
-      localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
-    } else {
-      const chatModelProviders = providers.chatModelProviders;
-      const embeddingModelProviders = providers.embeddingModelProviders;
-
-      if (
-        Object.keys(chatModelProviders).length > 0 &&
-        (!chatModelProviders[chatModelProvider] ||
-          Object.keys(chatModelProviders[chatModelProvider]).length === 0)
-      ) {
-        const chatModelProvidersKeys = Object.keys(chatModelProviders);
-
-        chatModelProvider =
-          chatModelProvidersKeys.find(
-            (key) => Object.keys(chatModelProviders[key]).length > 0,
-          ) || chatModelProvidersKeys[0];
-
-        localStorage.setItem('chatModelProvider', chatModelProvider);
-      }
-
-      if (
-        chatModelProvider &&
-        !chatModelProviders[chatModelProvider][chatModel]
-      ) {
-        if (
-          chatModelProvider === 'custom_openai' &&
-          Object.keys(chatModelProviders[chatModelProvider]).length === 0
-        ) {
-          toast.error(
-            "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
-          );
-          return setHasError(true);
-        }
-
-        chatModel = Object.keys(
-          chatModelProviders[
-            Object.keys(chatModelProviders[chatModelProvider]).length > 0
-              ? chatModelProvider
-              : Object.keys(chatModelProviders)[0]
-          ],
-        )[0];
-
-        localStorage.setItem('chatModel', chatModel);
-      }
-
-      if (
-        Object.keys(embeddingModelProviders).length > 0 &&
-        !embeddingModelProviders[embeddingModelProvider]
-      ) {
-        embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
-        localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
-      }
-
-      if (
-        embeddingModelProvider &&
-        !embeddingModelProviders[embeddingModelProvider][embeddingModel]
-      ) {
-        embeddingModel = Object.keys(
-          embeddingModelProviders[embeddingModelProvider],
-        )[0];
-        localStorage.setItem('embeddingModel', embeddingModel);
-      }
+    if (!res.ok) {
+      throw new Error(
+        `Provider fetching failed with status code ${res.status}`,
+      );
     }
+
+    const data = await res.json();
+    const providers: MinimalProvider[] = data.providers;
+
+    if (providers.length === 0) {
+      throw new Error(
+        'No chat model providers found, please configure them in the settings page.',
+      );
+    }
+
+    const chatModelProvider =
+      providers.find((p) => p.id === chatModelProviderId) ??
+      providers.find((p) => p.chatModels.length > 0);
+
+    if (!chatModelProvider) {
+      throw new Error(
+        'No chat models found, pleae configure them in the settings page.',
+      );
+    }
+
+    chatModelProviderId = chatModelProvider.id;
+
+    const chatModel =
+      chatModelProvider.chatModels.find((m) => m.key === chatModelKey) ??
+      chatModelProvider.chatModels[0];
+
+    chatModelKey = chatModel.key;
+
+    const embeddingModelProvider =
+      providers.find((p) => p.id === embeddingModelProviderId) ??
+      providers.find((p) => p.embeddingModels.length > 0);
+
+    if (!embeddingModelProvider) {
+      throw new Error(
+        'No embedding models found, pleae configure them in the settings page.',
+      );
+    }
+
+    embeddingModelProviderId = embeddingModelProvider.id;
+
+    const embeddingModel =
+      embeddingModelProvider.embeddingModels.find(
+        (m) => m.key === embeddingModelKey,
+      ) ?? embeddingModelProvider.embeddingModels[0];
+
+    embeddingModelKey = embeddingModel.key;
+
+    localStorage.setItem('chatModelKey', chatModelKey);
+    localStorage.setItem('chatModelProviderId', chatModelProviderId);
+    localStorage.setItem('embeddingModelKey', embeddingModelKey);
+    localStorage.setItem('embeddingModelProviderId', embeddingModelProviderId);
 
     setChatModelProvider({
-      name: chatModel!,
-      provider: chatModelProvider,
+      key: chatModelKey,
+      providerId: chatModelProviderId,
     });
 
     setEmbeddingModelProvider({
-      name: embeddingModel!,
-      provider: embeddingModelProvider,
+      key: embeddingModelKey,
+      providerId: embeddingModelProviderId,
    });
 
     setIsConfigReady(true);
-  } catch (err) {
+  } catch (err: any) {
     console.error('An error occurred while checking the configuration:', err);
+    toast.error(err.message);
     setIsConfigReady(false);
     setHasError(true);
   }
@@ -356,15 +295,15 @@ export const ChatProvider = ({
   const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
     {
-      name: '',
-      provider: '',
+      key: '',
+      providerId: '',
     },
   );
 
   const [embeddingModelProvider, setEmbeddingModelProvider] =
     useState<EmbeddingModelProvider>({
-      name: '',
-      provider: '',
+      key: '',
+      providerId: '',
     });
 
   const [isConfigReady, setIsConfigReady] = useState(false);
@@ -742,12 +681,12 @@ export const ChatProvider = ({
           ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
           : chatHistory,
         chatModel: {
-          name: chatModelProvider.name,
-          provider: chatModelProvider.provider,
+          key: chatModelProvider.key,
+          providerId: chatModelProvider.providerId,
         },
         embeddingModel: {
-          name: embeddingModelProvider.name,
-          provider: embeddingModelProvider.provider,
+          key: embeddingModelProvider.key,
+          providerId: embeddingModelProvider.providerId,
         },
         systemInstructions: localStorage.getItem('systemInstructions'),
       }),
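Note: a small sketch of the localStorage keys the reworked hook now reads and writes. The old keys ('chatModel', 'chatModelProvider', 'embeddingModel', 'embeddingModelProvider') are no longer consulted; selections are stored as a provider id plus a model key. The id and model keys below are illustrative only.

localStorage.setItem('chatModelProviderId', '<provider-uuid>'); // hypothetical provider id from /api/providers
localStorage.setItem('chatModelKey', 'gpt-4o-mini');
localStorage.setItem('embeddingModelProviderId', '<provider-uuid>');
localStorage.setItem('embeddingModelKey', 'text-embedding-3-small');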

View File

@@ -0,0 +1,45 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import { UIConfigField } from '@/lib/config/types';
abstract class BaseModelProvider<CONFIG> {
constructor(
protected id: string,
protected name: string,
protected config: CONFIG,
) {}
abstract getDefaultModels(): Promise<ModelList>;
abstract getModelList(): Promise<ModelList>;
abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;
static getProviderConfigFields(): UIConfigField[] {
throw new Error('Method not implemented.');
}
static getProviderMetadata(): ProviderMetadata {
throw new Error('Method not Implemented.');
}
static parseAndValidate(raw: any): any {
/* Static methods can't access class type parameters */
throw new Error('Method not Implemented.');
}
}
export type ProviderConstructor<CONFIG> = {
new (id: string, name: string, config: CONFIG): BaseModelProvider<CONFIG>;
parseAndValidate(raw: any): CONFIG;
getProviderConfigFields: () => UIConfigField[];
getProviderMetadata: () => ProviderMetadata;
};
export const createProviderInstance = <P extends ProviderConstructor<any>>(
Provider: P,
id: string,
name: string,
rawConfig: unknown,
): InstanceType<P> => {
const cfg = Provider.parseAndValidate(rawConfig);
return new Provider(id, name, cfg) as InstanceType<P>;
};
export default BaseModelProvider;

View File

@@ -0,0 +1,21 @@
import { ModelProviderUISection } from '@/lib/config/types';
import { ProviderConstructor } from './baseProvider';
import OpenAIProvider from './openai';
export const providers: Record<string, ProviderConstructor<any>> = {
openai: OpenAIProvider,
};
export const getModelProvidersUIConfigSection =
(): ModelProviderUISection[] => {
return Object.entries(providers).map(([k, p]) => {
const configFields = p.getProviderConfigFields();
const metadata = p.getProviderMetadata();
return {
fields: configFields,
key: k,
name: metadata.name,
};
});
};
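Note: a hypothetical sketch of how an additional provider type would plug into the map above: implement a ProviderConstructor and register it under its key. OllamaProvider and './ollama' are assumptions for illustration, not part of this diff; only the OpenAI provider is registered here.

import { ProviderConstructor } from './baseProvider';
import OpenAIProvider from './openai';
import OllamaProvider from './ollama'; // hypothetical additional provider

export const providers: Record<string, ProviderConstructor<any>> = {
  openai: OpenAIProvider,
  ollama: OllamaProvider, // new entry picked up by getModelProvidersUIConfigSection()
};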

View File

@@ -0,0 +1,214 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
interface OpenAIConfig {
apiKey: string;
baseURL: string;
}
const defaultChatModels: Model[] = [
{
name: 'GPT-3.5 Turbo',
key: 'gpt-3.5-turbo',
},
{
name: 'GPT-4',
key: 'gpt-4',
},
{
name: 'GPT-4 turbo',
key: 'gpt-4-turbo',
},
{
name: 'GPT-4 omni',
key: 'gpt-4o',
},
{
name: 'GPT-4o (2024-05-13)',
key: 'gpt-4o-2024-05-13',
},
{
name: 'GPT-4 omni mini',
key: 'gpt-4o-mini',
},
{
name: 'GPT 4.1 nano',
key: 'gpt-4.1-nano',
},
{
name: 'GPT 4.1 mini',
key: 'gpt-4.1-mini',
},
{
name: 'GPT 4.1',
key: 'gpt-4.1',
},
{
name: 'GPT 5 nano',
key: 'gpt-5-nano',
},
{
name: 'GPT 5',
key: 'gpt-5',
},
{
name: 'GPT 5 Mini',
key: 'gpt-5-mini',
},
{
name: 'o1',
key: 'o1',
},
{
name: 'o3',
key: 'o3',
},
{
name: 'o3 Mini',
key: 'o3-mini',
},
{
name: 'o4 Mini',
key: 'o4-mini',
},
];
const defaultEmbeddingModels: Model[] = [
{
name: 'Text Embedding 3 Small',
key: 'text-embedding-3-small',
},
{
name: 'Text Embedding 3 Large',
key: 'text-embedding-3-large',
},
];
const providerConfigFields: UIConfigField[] = [
{
type: 'password',
name: 'API Key',
key: 'apiKey',
description: 'Your OpenAI API key',
required: true,
placeholder: 'OpenAI API Key',
env: 'OPENAI_API_KEY',
scope: 'server',
},
{
type: 'string',
name: 'Base URL',
key: 'baseURL',
description: 'The base URL for the OpenAI API',
required: true,
placeholder: 'OpenAI Base URL',
default: 'https://api.openai.com/v1',
env: 'OPENAI_BASE_URL',
scope: 'server',
},
];
class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
constructor(id: string, name: string, config: OpenAIConfig) {
super(id, name, config);
}
async getDefaultModels(): Promise<ModelList> {
if (this.config.baseURL === 'https://api.openai.com/v1') {
return {
embedding: defaultEmbeddingModels,
chat: defaultChatModels,
};
}
return {
embedding: [],
chat: [],
};
}
async getModelList(): Promise<ModelList> {
const defaultModels = await this.getDefaultModels();
const configProvider = getConfiguredModelProviderById(this.id)!;
return {
embedding: [
...defaultModels.embedding,
...configProvider.embeddingModels,
],
chat: [...defaultModels.chat, ...configProvider.chatModels],
};
}
async loadChatModel(key: string): Promise<BaseChatModel> {
const modelList = await this.getModelList();
const exists = modelList.chat.find((m) => m.key === key);
if (!exists) {
throw new Error(
'Error Loading OpenAI Chat Model. Invalid Model Selected.',
);
}
return new ChatOpenAI({
apiKey: this.config.apiKey,
temperature: 0.7,
model: key,
configuration: {
baseURL: this.config.baseURL,
},
});
}
async loadEmbeddingModel(key: string): Promise<Embeddings> {
const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key);
if (!exists) {
throw new Error(
'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
);
}
return new OpenAIEmbeddings({
apiKey: this.config.apiKey,
model: key,
configuration: {
baseURL: this.config.baseURL,
},
});
}
static parseAndValidate(raw: any): OpenAIConfig {
if (!raw || typeof raw !== 'object')
throw new Error('Invalid config provided. Expected object');
if (!raw.apiKey || !raw.baseURL)
throw new Error(
'Invalid config provided. API key and base URL must be provided',
);
return {
apiKey: String(raw.apiKey),
baseURL: String(raw.baseURL),
};
}
static getProviderConfigFields(): UIConfigField[] {
return providerConfigFields;
}
static getProviderMetadata(): ProviderMetadata {
return {
key: 'openai',
name: 'OpenAI',
};
}
}
export default OpenAIProvider;
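
A hedged usage sketch of the provider on its own (the import path, model key, and prompt are placeholders). Note that getModelList looks the provider up in the server config registry by id, so the id used here must already exist there.

import OpenAIProvider from '@/lib/models/providers/openai'; // path assumed

const provider = new OpenAIProvider('openai', 'OpenAI', {
  apiKey: process.env.OPENAI_API_KEY ?? '',
  baseURL: 'https://api.openai.com/v1',
});

async function ask(prompt: string) {
  // Resolves to a LangChain ChatOpenAI instance; unknown keys are rejected.
  const chat = await provider.loadChatModel('gpt-4o-mini');
  return chat.invoke(prompt);
}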

View File

@@ -0,0 +1,78 @@
import { ConfigModelProvider } from '../config/types';
import BaseModelProvider, {
createProviderInstance,
} from './providers/baseProvider';
import { getConfiguredModelProviders } from '../config/serverRegistry';
import { providers } from './providers';
import { MinimalProvider, Model } from './types';
class ModelRegistry {
activeProviders: (ConfigModelProvider & {
provider: BaseModelProvider<any>;
})[] = [];
constructor() {
this.initializeActiveProviders();
}
private initializeActiveProviders() {
const configuredProviders = getConfiguredModelProviders();
configuredProviders.forEach((p) => {
try {
const provider = providers[p.type];
if (!provider) throw new Error('Invalid provider type');
this.activeProviders.push({
...p,
provider: createProviderInstance(provider, p.id, p.name, p.config),
});
} catch (err) {
console.error(
`Failed to initialize provider. Type: ${p.type}, ID: ${p.id}, Config: ${JSON.stringify(p.config)}, Error: ${err}`,
);
}
});
}
async getActiveProviders() {
const providers: MinimalProvider[] = [];
await Promise.all(
this.activeProviders.map(async (p) => {
const m = await p.provider.getModelList();
providers.push({
id: p.id,
name: p.name,
chatModels: m.chat,
embeddingModels: m.embedding,
});
}),
);
return providers;
}
async loadChatModel(providerId: string, modelName: string) {
const provider = this.activeProviders.find((p) => p.id === providerId);
if (!provider) throw new Error('Invalid provider id');
const model = await provider.provider.loadChatModel(modelName);
return model;
}
async loadEmbeddingModel(providerId: string, modelName: string) {
const provider = this.activeProviders.find((p) => p.id === providerId);
if (!provider) throw new Error('Invalid provider id');
const model = await provider.provider.loadEmbeddingModel(modelName);
return model;
}
}
export default ModelRegistry;
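
Roughly how a route is expected to consume the registry; the import path, provider id, and model keys below are placeholder assumptions.

import ModelRegistry from '@/lib/models/registry'; // path assumed

async function handleRequest() {
  const registry = new ModelRegistry();

  // One MinimalProvider entry per provider that initialized successfully.
  const available = await registry.getActiveProviders();

  // Concrete LangChain instances for a given provider id + model key pair.
  const chat = await registry.loadChatModel('openai', 'gpt-4o-mini');
  const embeddings = await registry.loadEmbeddingModel('openai', 'text-embedding-3-small');

  return { available, chat, embeddings };
}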

src/lib/models/types.ts (Normal file, 34 lines)
View File

@@ -0,0 +1,34 @@
type Model = {
name: string;
key: string;
};
type ModelList = {
embedding: Model[];
chat: Model[];
};
type ProviderMetadata = {
name: string;
key: string;
};
type MinimalProvider = {
id: string;
name: string;
chatModels: Model[];
embeddingModels: Model[];
};
type ModelWithProvider = {
key: string;
providerId: string;
};
export type {
Model,
ModelList,
ProviderMetadata,
MinimalProvider,
ModelWithProvider,
};
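
ModelWithProvider is presumably the shape the client sends when it picks a model; a hypothetical payload sketch, not shown anywhere in this diff:

import { ModelWithProvider } from '@/lib/models/types';

const body: { chatModel: ModelWithProvider; embeddingModel: ModelWithProvider } = {
  chatModel: { providerId: 'openai', key: 'gpt-4o-mini' },
  embeddingModel: { providerId: 'openai', key: 'text-embedding-3-small' },
};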

View File

@@ -1,94 +0,0 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getAimlApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import axios from 'axios';
export const PROVIDER_INFO = {
key: 'aimlapi',
displayName: 'AI/ML API',
};
interface AimlApiModel {
id: string;
name?: string;
type?: string;
}
const API_URL = 'https://api.aimlapi.com';
export const loadAimlApiChatModels = async () => {
const apiKey = getAimlApiKey();
if (!apiKey) return {};
try {
const response = await axios.get(`${API_URL}/models`, {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
});
const chatModels: Record<string, ChatModel> = {};
response.data.data.forEach((model: AimlApiModel) => {
if (model.type === 'chat-completion') {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
apiKey: apiKey,
modelName: model.id,
temperature: 0.7,
configuration: {
baseURL: API_URL,
},
}) as unknown as BaseChatModel,
};
}
});
return chatModels;
} catch (err) {
console.error(`Error loading AI/ML API models: ${err}`);
return {};
}
};
export const loadAimlApiEmbeddingModels = async () => {
const apiKey = getAimlApiKey();
if (!apiKey) return {};
try {
const response = await axios.get(`${API_URL}/models`, {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
});
const embeddingModels: Record<string, EmbeddingModel> = {};
response.data.data.forEach((model: AimlApiModel) => {
if (model.type === 'embedding') {
embeddingModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
apiKey: apiKey,
modelName: model.id,
configuration: {
baseURL: API_URL,
},
}) as unknown as Embeddings,
};
}
});
return embeddingModels;
} catch (err) {
console.error(`Error loading AI/ML API embeddings models: ${err}`);
return {};
}
};

View File

@@ -1,78 +0,0 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatModel } from '.';
import { getAnthropicApiKey } from '../config';
export const PROVIDER_INFO = {
key: 'anthropic',
displayName: 'Anthropic',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const anthropicChatModels: Record<string, string>[] = [
{
displayName: 'Claude 4.1 Opus',
key: 'claude-opus-4-1-20250805',
},
{
displayName: 'Claude 4 Opus',
key: 'claude-opus-4-20250514',
},
{
displayName: 'Claude 4 Sonnet',
key: 'claude-sonnet-4-20250514',
},
{
displayName: 'Claude 3.7 Sonnet',
key: 'claude-3-7-sonnet-20250219',
},
{
displayName: 'Claude 3.5 Haiku',
key: 'claude-3-5-haiku-20241022',
},
{
displayName: 'Claude 3.5 Sonnet v2',
key: 'claude-3-5-sonnet-20241022',
},
{
displayName: 'Claude 3.5 Sonnet',
key: 'claude-3-5-sonnet-20240620',
},
{
displayName: 'Claude 3 Opus',
key: 'claude-3-opus-20240229',
},
{
displayName: 'Claude 3 Sonnet',
key: 'claude-3-sonnet-20240229',
},
{
displayName: 'Claude 3 Haiku',
key: 'claude-3-haiku-20240307',
},
];
export const loadAnthropicChatModels = async () => {
const anthropicApiKey = getAnthropicApiKey();
if (!anthropicApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
anthropicChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatAnthropic({
apiKey: anthropicApiKey,
modelName: model.key,
temperature: 0.7,
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Anthropic models: ${err}`);
return {};
}
};

View File

@@ -1,49 +0,0 @@
import { ChatOpenAI } from '@langchain/openai';
import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
export const PROVIDER_INFO = {
key: 'deepseek',
displayName: 'Deepseek AI',
};
const deepseekChatModels: Record<string, string>[] = [
{
displayName: 'Deepseek Chat (Deepseek V3)',
key: 'deepseek-chat',
},
{
displayName: 'Deepseek Reasoner (Deepseek R1)',
key: 'deepseek-reasoner',
},
];
export const loadDeepseekChatModels = async () => {
const deepseekApiKey = getDeepseekApiKey();
if (!deepseekApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
deepseekChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
apiKey: deepseekApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://api.deepseek.com',
},
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Deepseek models: ${err}`);
return {};
}
};

View File

@@ -1,114 +0,0 @@
import {
ChatGoogleGenerativeAI,
GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { getGeminiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'gemini',
displayName: 'Google Gemini',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
const geminiChatModels: Record<string, string>[] = [
{
displayName: 'Gemini 2.5 Flash',
key: 'gemini-2.5-flash',
},
{
displayName: 'Gemini 2.5 Flash-Lite',
key: 'gemini-2.5-flash-lite',
},
{
displayName: 'Gemini 2.5 Pro',
key: 'gemini-2.5-pro',
},
{
displayName: 'Gemini 2.0 Flash',
key: 'gemini-2.0-flash',
},
{
displayName: 'Gemini 2.0 Flash-Lite',
key: 'gemini-2.0-flash-lite',
},
{
displayName: 'Gemini 2.0 Flash Thinking Experimental',
key: 'gemini-2.0-flash-thinking-exp-01-21',
},
{
displayName: 'Gemini 1.5 Flash',
key: 'gemini-1.5-flash',
},
{
displayName: 'Gemini 1.5 Flash-8B',
key: 'gemini-1.5-flash-8b',
},
{
displayName: 'Gemini 1.5 Pro',
key: 'gemini-1.5-pro',
},
];
const geminiEmbeddingModels: Record<string, string>[] = [
{
displayName: 'Text Embedding 004',
key: 'models/text-embedding-004',
},
{
displayName: 'Embedding 001',
key: 'models/embedding-001',
},
];
export const loadGeminiChatModels = async () => {
const geminiApiKey = getGeminiApiKey();
if (!geminiApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
geminiChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatGoogleGenerativeAI({
apiKey: geminiApiKey,
model: model.key,
temperature: 0.7,
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Gemini models: ${err}`);
return {};
}
};
export const loadGeminiEmbeddingModels = async () => {
const geminiApiKey = getGeminiApiKey();
if (!geminiApiKey) return {};
try {
const embeddingModels: Record<string, EmbeddingModel> = {};
geminiEmbeddingModels.forEach((model) => {
embeddingModels[model.key] = {
displayName: model.displayName,
model: new GoogleGenerativeAIEmbeddings({
apiKey: geminiApiKey,
modelName: model.key,
}) as unknown as Embeddings,
};
});
return embeddingModels;
} catch (err) {
console.error(`Error loading Gemini embeddings models: ${err}`);
return {};
}
};

View File

@@ -1,44 +0,0 @@
import { ChatGroq } from '@langchain/groq';
import { getGroqApiKey } from '../config';
import { ChatModel } from '.';
export const PROVIDER_INFO = {
key: 'groq',
displayName: 'Groq',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
export const loadGroqChatModels = async () => {
const groqApiKey = getGroqApiKey();
if (!groqApiKey) return {};
try {
const res = await fetch('https://api.groq.com/openai/v1/models', {
method: 'GET',
headers: {
Authorization: `bearer ${groqApiKey}`,
'Content-Type': 'application/json',
},
});
const groqChatModels = (await res.json()).data;
const chatModels: Record<string, ChatModel> = {};
groqChatModels.forEach((model: any) => {
chatModels[model.id] = {
displayName: model.id,
model: new ChatGroq({
apiKey: groqApiKey,
model: model.id,
temperature: 0.7,
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Groq models: ${err}`);
return {};
}
};

View File

@@ -1,170 +0,0 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import {
loadOpenAIChatModels,
loadOpenAIEmbeddingModels,
PROVIDER_INFO as OpenAIInfo,
PROVIDER_INFO,
} from './openai';
import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
} from '../config';
import { ChatOpenAI } from '@langchain/openai';
import {
loadOllamaChatModels,
loadOllamaEmbeddingModels,
PROVIDER_INFO as OllamaInfo,
} from './ollama';
import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
import {
loadAnthropicChatModels,
PROVIDER_INFO as AnthropicInfo,
} from './anthropic';
import {
loadGeminiChatModels,
loadGeminiEmbeddingModels,
PROVIDER_INFO as GeminiInfo,
} from './gemini';
import {
loadTransformersEmbeddingsModels,
PROVIDER_INFO as TransformersInfo,
} from './transformers';
import {
loadDeepseekChatModels,
PROVIDER_INFO as DeepseekInfo,
} from './deepseek';
import {
loadAimlApiChatModels,
loadAimlApiEmbeddingModels,
PROVIDER_INFO as AimlApiInfo,
} from './aimlapi';
import {
loadLMStudioChatModels,
loadLMStudioEmbeddingsModels,
PROVIDER_INFO as LMStudioInfo,
} from './lmstudio';
import {
loadLemonadeChatModels,
loadLemonadeEmbeddingModels,
PROVIDER_INFO as LemonadeInfo,
} from './lemonade';
export const PROVIDER_METADATA = {
openai: OpenAIInfo,
ollama: OllamaInfo,
groq: GroqInfo,
anthropic: AnthropicInfo,
gemini: GeminiInfo,
transformers: TransformersInfo,
deepseek: DeepseekInfo,
aimlapi: AimlApiInfo,
lmstudio: LMStudioInfo,
lemonade: LemonadeInfo,
custom_openai: {
key: 'custom_openai',
displayName: 'Custom OpenAI',
},
};
export interface ChatModel {
displayName: string;
model: BaseChatModel;
}
export interface EmbeddingModel {
displayName: string;
model: Embeddings;
}
export const chatModelProviders: Record<
string,
() => Promise<Record<string, ChatModel>>
> = {
openai: loadOpenAIChatModels,
ollama: loadOllamaChatModels,
groq: loadGroqChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels,
aimlapi: loadAimlApiChatModels,
lmstudio: loadLMStudioChatModels,
lemonade: loadLemonadeChatModels,
};
export const embeddingModelProviders: Record<
string,
() => Promise<Record<string, EmbeddingModel>>
> = {
openai: loadOpenAIEmbeddingModels,
ollama: loadOllamaEmbeddingModels,
gemini: loadGeminiEmbeddingModels,
transformers: loadTransformersEmbeddingsModels,
aimlapi: loadAimlApiEmbeddingModels,
lmstudio: loadLMStudioEmbeddingsModels,
lemonade: loadLemonadeEmbeddingModels,
};
export const getAvailableChatModelProviders = async () => {
const models: Record<string, Record<string, ChatModel>> = {};
for (const provider in chatModelProviders) {
const providerModels = await chatModelProviders[provider]();
if (Object.keys(providerModels).length > 0) {
models[provider] = providerModels;
}
}
const customOpenAiApiKey = getCustomOpenaiApiKey();
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName();
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
? {
[customOpenAiModelName]: {
displayName: customOpenAiModelName,
model: new ChatOpenAI({
apiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
...(() => {
const temperatureRestrictedModels = [
'gpt-5-nano',
'gpt-5',
'gpt-5-mini',
'o1',
'o3',
'o3-mini',
'o4-mini',
];
const isTemperatureRestricted =
temperatureRestrictedModels.some((restrictedModel) =>
customOpenAiModelName.includes(restrictedModel),
);
return isTemperatureRestricted ? {} : { temperature: 0.7 };
})(),
configuration: {
baseURL: customOpenAiApiUrl,
},
}) as unknown as BaseChatModel,
},
}
: {}),
};
return models;
};
export const getAvailableEmbeddingModelProviders = async () => {
const models: Record<string, Record<string, EmbeddingModel>> = {};
for (const provider in embeddingModelProviders) {
const providerModels = await embeddingModelProviders[provider]();
if (Object.keys(providerModels).length > 0) {
models[provider] = providerModels;
}
}
return models;
};

View File

@@ -1,94 +0,0 @@
import axios from 'axios';
import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'lemonade',
displayName: 'Lemonade',
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
export const loadLemonadeChatModels = async () => {
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
const lemonadeApiKey = getLemonadeApiKey();
if (!lemonadeApiEndpoint) return {};
try {
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
headers: {
'Content-Type': 'application/json',
...(lemonadeApiKey
? { Authorization: `Bearer ${lemonadeApiKey}` }
: {}),
},
});
const { data: models } = res.data;
const chatModels: Record<string, ChatModel> = {};
models.forEach((model: any) => {
chatModels[model.id] = {
displayName: model.id,
model: new ChatOpenAI({
apiKey: lemonadeApiKey || 'lemonade-key',
modelName: model.id,
temperature: 0.7,
configuration: {
baseURL: `${lemonadeApiEndpoint}/api/v1`,
},
}),
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Lemonade models: ${err}`);
return {};
}
};
export const loadLemonadeEmbeddingModels = async () => {
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
const lemonadeApiKey = getLemonadeApiKey();
if (!lemonadeApiEndpoint) return {};
try {
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
headers: {
'Content-Type': 'application/json',
...(lemonadeApiKey
? { Authorization: `Bearer ${lemonadeApiKey}` }
: {}),
},
});
const { data: models } = res.data;
const embeddingModels: Record<string, EmbeddingModel> = {};
// Filter models that support embeddings (if Lemonade provides this info)
// For now, we'll assume all models can be used for embeddings
models.forEach((model: any) => {
embeddingModels[model.id] = {
displayName: model.id,
model: new OpenAIEmbeddings({
apiKey: lemonadeApiKey || 'lemonade-key',
modelName: model.id,
configuration: {
baseURL: `${lemonadeApiEndpoint}/api/v1`,
},
}),
};
});
return embeddingModels;
} catch (err) {
console.error(`Error loading Lemonade embedding models: ${err}`);
return {};
}
};

View File

@@ -1,100 +0,0 @@
import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
import axios from 'axios';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'lmstudio',
displayName: 'LM Studio',
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
interface LMStudioModel {
id: string;
name?: string;
}
const ensureV1Endpoint = (endpoint: string): string =>
endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;
const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
try {
await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
headers: { 'Content-Type': 'application/json' },
});
return true;
} catch {
return false;
}
};
export const loadLMStudioChatModels = async () => {
const endpoint = getLMStudioApiEndpoint();
if (!endpoint) return {};
if (!(await checkServerAvailability(endpoint))) return {};
try {
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
headers: { 'Content-Type': 'application/json' },
});
const chatModels: Record<string, ChatModel> = {};
response.data.data.forEach((model: LMStudioModel) => {
chatModels[model.id] = {
displayName: model.name || model.id,
model: new ChatOpenAI({
apiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},
modelName: model.id,
temperature: 0.7,
streaming: true,
maxRetries: 3,
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading LM Studio models: ${err}`);
return {};
}
};
export const loadLMStudioEmbeddingsModels = async () => {
const endpoint = getLMStudioApiEndpoint();
if (!endpoint) return {};
if (!(await checkServerAvailability(endpoint))) return {};
try {
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
headers: { 'Content-Type': 'application/json' },
});
const embeddingsModels: Record<string, EmbeddingModel> = {};
response.data.data.forEach((model: LMStudioModel) => {
embeddingsModels[model.id] = {
displayName: model.name || model.id,
model: new OpenAIEmbeddings({
apiKey: 'lm-studio',
configuration: {
baseURL: ensureV1Endpoint(endpoint),
},
modelName: model.id,
}) as unknown as Embeddings,
};
});
return embeddingsModels;
} catch (err) {
console.error(`Error loading LM Studio embeddings model: ${err}`);
return {};
}
};

View File

@@ -1,86 +0,0 @@
import axios from 'axios';
import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'ollama',
displayName: 'Ollama',
};
import { ChatOllama } from '@langchain/ollama';
import { OllamaEmbeddings } from '@langchain/ollama';
export const loadOllamaChatModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
try {
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
headers: {
'Content-Type': 'application/json',
},
});
const { models } = res.data;
const chatModels: Record<string, ChatModel> = {};
models.forEach((model: any) => {
chatModels[model.model] = {
displayName: model.name,
model: new ChatOllama({
baseUrl: ollamaApiEndpoint,
model: model.model,
temperature: 0.7,
keepAlive: getKeepAlive(),
...(ollamaApiKey
? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
: {}),
}),
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Ollama models: ${err}`);
return {};
}
};
export const loadOllamaEmbeddingModels = async () => {
const ollamaApiEndpoint = getOllamaApiEndpoint();
const ollamaApiKey = getOllamaApiKey();
if (!ollamaApiEndpoint) return {};
try {
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
headers: {
'Content-Type': 'application/json',
},
});
const { models } = res.data;
const embeddingModels: Record<string, EmbeddingModel> = {};
models.forEach((model: any) => {
embeddingModels[model.model] = {
displayName: model.name,
model: new OllamaEmbeddings({
baseUrl: ollamaApiEndpoint,
model: model.model,
...(ollamaApiKey
? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
: {}),
}),
};
});
return embeddingModels;
} catch (err) {
console.error(`Error loading Ollama embeddings models: ${err}`);
return {};
}
};

View File

@@ -1,159 +0,0 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getOpenaiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
export const PROVIDER_INFO = {
key: 'openai',
displayName: 'OpenAI',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
const openaiChatModels: Record<string, string>[] = [
{
displayName: 'GPT-3.5 Turbo',
key: 'gpt-3.5-turbo',
},
{
displayName: 'GPT-4',
key: 'gpt-4',
},
{
displayName: 'GPT-4 turbo',
key: 'gpt-4-turbo',
},
{
displayName: 'GPT-4 omni',
key: 'gpt-4o',
},
{
displayName: 'GPT-4o (2024-05-13)',
key: 'gpt-4o-2024-05-13',
},
{
displayName: 'GPT-4 omni mini',
key: 'gpt-4o-mini',
},
{
displayName: 'GPT 4.1 nano',
key: 'gpt-4.1-nano',
},
{
displayName: 'GPT 4.1 mini',
key: 'gpt-4.1-mini',
},
{
displayName: 'GPT 4.1',
key: 'gpt-4.1',
},
{
displayName: 'GPT 5 nano',
key: 'gpt-5-nano',
},
{
displayName: 'GPT 5',
key: 'gpt-5',
},
{
displayName: 'GPT 5 Mini',
key: 'gpt-5-mini',
},
{
displayName: 'o1',
key: 'o1',
},
{
displayName: 'o3',
key: 'o3',
},
{
displayName: 'o3 Mini',
key: 'o3-mini',
},
{
displayName: 'o4 Mini',
key: 'o4-mini',
},
];
const openaiEmbeddingModels: Record<string, string>[] = [
{
displayName: 'Text Embedding 3 Small',
key: 'text-embedding-3-small',
},
{
displayName: 'Text Embedding 3 Large',
key: 'text-embedding-3-large',
},
];
export const loadOpenAIChatModels = async () => {
const openaiApiKey = getOpenaiApiKey();
if (!openaiApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
openaiChatModels.forEach((model) => {
// Models that only support temperature = 1
const temperatureRestrictedModels = [
'gpt-5-nano',
'gpt-5',
'gpt-5-mini',
'o1',
'o3',
'o3-mini',
'o4-mini',
];
const isTemperatureRestricted = temperatureRestrictedModels.some(
(restrictedModel) => model.key.includes(restrictedModel),
);
const modelConfig: any = {
apiKey: openaiApiKey,
modelName: model.key,
};
// Only add temperature if the model supports it
if (!isTemperatureRestricted) {
modelConfig.temperature = 0.7;
}
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading OpenAI models: ${err}`);
return {};
}
};
export const loadOpenAIEmbeddingModels = async () => {
const openaiApiKey = getOpenaiApiKey();
if (!openaiApiKey) return {};
try {
const embeddingModels: Record<string, EmbeddingModel> = {};
openaiEmbeddingModels.forEach((model) => {
embeddingModels[model.key] = {
displayName: model.displayName,
model: new OpenAIEmbeddings({
apiKey: openaiApiKey,
modelName: model.key,
}) as unknown as Embeddings,
};
});
return embeddingModels;
} catch (err) {
console.error(`Error loading OpenAI embeddings models: ${err}`);
return {};
}
};

View File

@@ -1,36 +0,0 @@
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
export const PROVIDER_INFO = {
key: 'transformers',
displayName: 'Hugging Face',
};
export const loadTransformersEmbeddingsModels = async () => {
try {
const embeddingModels = {
'xenova-bge-small-en-v1.5': {
displayName: 'BGE Small',
model: new HuggingFaceTransformersEmbeddings({
modelName: 'Xenova/bge-small-en-v1.5',
}),
},
'xenova-gte-small': {
displayName: 'GTE Small',
model: new HuggingFaceTransformersEmbeddings({
modelName: 'Xenova/gte-small',
}),
},
'xenova-bert-base-multilingual-uncased': {
displayName: 'Bert Multilingual',
model: new HuggingFaceTransformersEmbeddings({
modelName: 'Xenova/bert-base-multilingual-uncased',
}),
},
};
return embeddingModels;
} catch (err) {
console.error(`Error loading Transformers embeddings model: ${err}`);
return {};
}
};

View File

@@ -1,5 +1,5 @@
import axios from 'axios';
-import { getSearxngApiEndpoint } from './config';
+import { getSearxngURL } from './config/serverRegistry';
interface SearxngSearchOptions {
  categories?: string[];
@@ -23,7 +23,7 @@ export const searchSearxng = async (
  query: string,
  opts?: SearxngSearchOptions,
) => {
-  const searxngURL = getSearxngApiEndpoint();
+  const searxngURL = getSearxngURL();
  const url = new URL(`${searxngURL}/search?format=json`);
  url.searchParams.append('q', query);

src/lib/serverUtils.ts (Normal file, 7 lines)
View File

@@ -0,0 +1,7 @@
import crypto from 'crypto';
export const hashObj = (obj: { [key: string]: any }) => {
const json = JSON.stringify(obj, Object.keys(obj).sort());
const hash = crypto.createHash('sha256').update(json).digest('hex');
return hash;
};
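
A small illustration of the intent (not part of the diff): sorting the keys makes the hash independent of property order, so the same flat object always yields the same hash. One caveat: a replacer array filters keys at every depth, so nested keys that do not also appear at the top level are left out of the hash.

import { hashObj } from '@/lib/serverUtils';

const same = hashObj({ apiKey: 'x', baseURL: 'y' }) === hashObj({ baseURL: 'y', apiKey: 'x' }); // true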

View File

@@ -1,5 +0,0 @@
declare function computeDot(vectorA: number[], vectorB: number[]): number;
declare module 'compute-dot' {
export default computeDot;
}

View File

@@ -1,17 +1,7 @@
-import dot from 'compute-dot';
import cosineSimilarity from 'compute-cosine-similarity';
-import { getSimilarityMeasure } from '../config';
const computeSimilarity = (x: number[], y: number[]): number => {
-  const similarityMeasure = getSimilarityMeasure();
-  if (similarityMeasure === 'cosine') {
-    return cosineSimilarity(x, y) as number;
-  } else if (similarityMeasure === 'dot') {
-    return dot(x, y);
-  }
-  throw new Error('Invalid similarity measure');
+  return cosineSimilarity(x, y) as number;
};
export default computeSimilarity;

View File

@@ -2,17 +2,17 @@ import type { Config } from 'tailwindcss';
import type { DefaultColors } from 'tailwindcss/types/generated/colors';
const themeDark = (colors: DefaultColors) => ({
-  50: '#111116',
-  100: '#1f202b',
-  200: '#2d2f3f',
-  300: '#3a3c4c',
+  50: '#0d1117',
+  100: '#161b22',
+  200: '#21262d',
+  300: '#30363d',
});
const themeLight = (colors: DefaultColors) => ({
  50: '#ffffff',
-  100: '#f1f5f9',
-  200: '#c4c7c5',
-  300: '#9ca3af',
+  100: '#f6f8fa',
+  200: '#d0d7de',
+  300: '#afb8c1',
});
const config: Config = {
@@ -49,6 +49,9 @@ const config: Config = {
},
},
},
-  plugins: [require('@tailwindcss/typography')],
+  plugins: [
+    require('@tailwindcss/typography'),
+    require('@headlessui/tailwindcss')({ prefix: 'headless' }),
+  ],
};
export default config;

yarn.lock (124 lines changed)
View File

@@ -39,6 +39,13 @@
resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0" resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA== integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==
"@cspotcode/source-map-support@^0.8.0":
version "0.8.1"
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==
dependencies:
"@jridgewell/trace-mapping" "0.3.9"
"@dabh/diagnostics@^2.0.2": "@dabh/diagnostics@^2.0.2":
version "2.0.3" version "2.0.3"
resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a" resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
@@ -407,6 +414,11 @@
"@react-aria/interactions" "^3.21.3" "@react-aria/interactions" "^3.21.3"
"@tanstack/react-virtual" "^3.8.1" "@tanstack/react-virtual" "^3.8.1"
"@headlessui/tailwindcss@^0.2.2":
version "0.2.2"
resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.2.2.tgz#8ebde73fabca72d48636ea56ae790209dc5f0d49"
integrity sha512-xNe42KjdyA4kfUKLLPGzME9zkH7Q3rOZ5huFihWNWOQFxnItxPB3/67yBI8/qBfY8nwBRx5GHn4VprsoluVMGw==
"@huggingface/jinja@^0.2.2": "@huggingface/jinja@^0.2.2":
version "0.2.2" version "0.2.2"
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627" resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
@@ -575,7 +587,7 @@
"@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/trace-mapping" "^0.3.24" "@jridgewell/trace-mapping" "^0.3.24"
"@jridgewell/resolve-uri@^3.1.0": "@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0":
version "3.1.2" version "3.1.2"
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
@@ -590,6 +602,14 @@
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==
"@jridgewell/trace-mapping@0.3.9":
version "0.3.9"
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==
dependencies:
"@jridgewell/resolve-uri" "^3.0.3"
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/trace-mapping@^0.3.24": "@jridgewell/trace-mapping@^0.3.24":
version "0.3.25" version "0.3.25"
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
@@ -927,6 +947,26 @@
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4" resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4"
integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw== integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==
"@tsconfig/node10@^1.0.7":
version "1.0.11"
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2"
integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==
"@tsconfig/node12@^1.0.7":
version "1.0.11"
resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d"
integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==
"@tsconfig/node14@^1.0.0":
version "1.0.3"
resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1"
integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==
"@tsconfig/node16@^1.0.2":
version "1.0.4"
resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9"
integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==
"@types/better-sqlite3@^7.6.12": "@types/better-sqlite3@^7.6.12":
version "7.6.12" version "7.6.12"
resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7" resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7"
@@ -1119,6 +1159,18 @@ acorn-jsx@^5.3.2:
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
acorn-walk@^8.1.1:
version "8.3.4"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7"
integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==
dependencies:
acorn "^8.11.0"
acorn@^8.11.0, acorn@^8.4.1:
version "8.15.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816"
integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==
acorn@^8.9.0:
version "8.11.3"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
@@ -1181,6 +1233,11 @@ anymatch@~3.1.2:
normalize-path "^3.0.0" normalize-path "^3.0.0"
picomatch "^2.0.4" picomatch "^2.0.4"
arg@^4.1.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
arg@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
@@ -1739,6 +1796,11 @@ core-util-is@~1.0.0:
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
create-require@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
cross-fetch@^3.1.5:
version "3.2.0"
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3"
@@ -1883,6 +1945,11 @@ didyoumean@^1.2.2:
resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==
diff@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
dingbat-to-unicode@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83"
@@ -2642,6 +2709,15 @@ fraction.js@^4.3.7:
resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7"
integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==
framer-motion@^12.23.24:
version "12.23.24"
resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-12.23.24.tgz#4895b67e880bd2b1089e61fbaa32ae802fc24b8c"
integrity sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w==
dependencies:
motion-dom "^12.23.23"
motion-utils "^12.23.6"
tslib "^2.4.0"
fs-constants@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
@@ -3510,6 +3586,11 @@ lucide-react@^0.363.0:
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3" resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ== integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==
make-error@^1.1.1:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
mammoth@^1.9.1:
version "1.9.1"
resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf"
@@ -3602,6 +3683,18 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
motion-dom@^12.23.23:
version "12.23.23"
resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-12.23.23.tgz#8f874333ea1a04ee3a89eb928f518b463d589e0e"
integrity sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==
dependencies:
motion-utils "^12.23.6"
motion-utils@^12.23.6:
version "12.23.6"
resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-12.23.6.tgz#fafef80b4ea85122dd0d6c599a0c63d72881f312"
integrity sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==
ms@2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
@@ -4971,6 +5064,25 @@ ts-interface-checker@^0.1.9:
resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
ts-node@^10.9.2:
version "10.9.2"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f"
integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==
dependencies:
"@cspotcode/source-map-support" "^0.8.0"
"@tsconfig/node10" "^1.0.7"
"@tsconfig/node12" "^1.0.7"
"@tsconfig/node14" "^1.0.0"
"@tsconfig/node16" "^1.0.2"
acorn "^8.4.1"
acorn-walk "^8.1.1"
arg "^4.1.0"
create-require "^1.1.0"
diff "^4.0.1"
make-error "^1.1.1"
v8-compile-cache-lib "^3.0.1"
yn "3.1.1"
tsconfig-paths@^3.15.0:
version "3.15.0"
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4"
@@ -5138,6 +5250,11 @@ uuid@^9.0.1:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
v8-compile-cache-lib@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==
validate.io-array@^1.0.3, validate.io-array@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d"
@@ -5355,6 +5472,11 @@ yet-another-react-lightbox@^3.17.2:
resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c" resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c"
integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ== integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ==
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==
yocto-queue@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"