Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-25 06:28:16 +00:00)

Compare commits: d97fa708f1 ... develop/ne (1 commit, SHA 6f9eb3b1f3)
Binary image file changed (not shown). Size before: 2.1 MiB; after: 641 KiB.
.github/workflows/docker-build.yaml (vendored): 36 lines changed
@@ -4,7 +4,6 @@ on:
  push:
    branches:
      - master
      - canary
  release:
    types: [published]

@@ -44,19 +43,6 @@ jobs:
          -t itzcrazykns1337/${IMAGE_NAME}:amd64 \
          --push .

      - name: Build and push AMD64 Canary Docker image
        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
        run: |
          DOCKERFILE=app.dockerfile
          IMAGE_NAME=perplexica
          docker buildx build --platform linux/amd64 \
            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
            --cache-to=type=inline \
            --provenance false \
            -f $DOCKERFILE \
            -t itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
            --push .

      - name: Build and push AMD64 release Docker image
        if: github.event_name == 'release'
        run: |
@@ -105,19 +91,6 @@ jobs:
          -t itzcrazykns1337/${IMAGE_NAME}:arm64 \
          --push .

      - name: Build and push ARM64 Canary Docker image
        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
        run: |
          DOCKERFILE=app.dockerfile
          IMAGE_NAME=perplexica
          docker buildx build --platform linux/arm64 \
            --cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
            --cache-to=type=inline \
            --provenance false \
            -f $DOCKERFILE \
            -t itzcrazykns1337/${IMAGE_NAME}:canary-arm64 \
            --push .

      - name: Build and push ARM64 release Docker image
        if: github.event_name == 'release'
        run: |
@@ -155,15 +128,6 @@ jobs:
            --amend itzcrazykns1337/${IMAGE_NAME}:arm64
          docker manifest push itzcrazykns1337/${IMAGE_NAME}:main

      - name: Create and push multi-arch manifest for canary
        if: github.ref == 'refs/heads/canary' && github.event_name == 'push'
        run: |
          IMAGE_NAME=perplexica
          docker manifest create itzcrazykns1337/${IMAGE_NAME}:canary \
            --amend itzcrazykns1337/${IMAGE_NAME}:canary-amd64 \
            --amend itzcrazykns1337/${IMAGE_NAME}:canary-arm64
          docker manifest push itzcrazykns1337/${IMAGE_NAME}:canary

      - name: Create and push multi-arch manifest for releases
        if: github.event_name == 'release'
        run: |
@@ -15,6 +15,9 @@ COPY drizzle ./drizzle
RUN mkdir -p /home/perplexica/data
RUN yarn build

RUN yarn add --dev @vercel/ncc
RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator

FROM node:24.5.0-slim

RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
@@ -27,6 +30,8 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
COPY --from=builder /home/perplexica/.next/standalone ./
COPY --from=builder /home/perplexica/data ./data
COPY drizzle ./drizzle
COPY --from=builder /home/perplexica/migrator/build ./build
COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js

RUN mkdir /home/perplexica/uploads
@@ -1,4 +1,6 @@
#!/bin/sh
set -e

node migrate.js

exec node server.js
package.json: 12 lines changed
@@ -1,18 +1,18 @@
{
  "name": "perplexica-frontend",
  "version": "1.11.0-rc3",
  "version": "1.11.0-rc2",
  "license": "MIT",
  "author": "ItzCrazyKns",
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "build": "npm run db:migrate && next build",
    "start": "next start",
    "lint": "next lint",
    "format:write": "prettier . --write"
    "format:write": "prettier . --write",
    "db:migrate": "node ./src/lib/db/migrate.ts"
  },
  "dependencies": {
    "@headlessui/react": "^2.2.0",
    "@headlessui/tailwindcss": "^0.2.2",
    "@iarna/toml": "^2.2.5",
    "@icons-pack/react-simple-icons": "^12.3.0",
    "@langchain/anthropic": "^0.3.24",
@@ -20,6 +20,7 @@
    "@langchain/core": "^0.3.66",
    "@langchain/google-genai": "^0.2.15",
    "@langchain/groq": "^0.2.3",
    "@langchain/langgraph": "^0.4.9",
    "@langchain/ollama": "^0.2.3",
    "@langchain/openai": "^0.6.2",
    "@langchain/textsplitters": "^0.1.0",
@@ -29,8 +30,8 @@
    "better-sqlite3": "^11.9.1",
    "clsx": "^2.1.0",
    "compute-cosine-similarity": "^1.1.0",
    "compute-dot": "^1.1.0",
    "drizzle-orm": "^0.40.1",
    "framer-motion": "^12.23.24",
    "html-to-text": "^9.0.5",
    "jspdf": "^3.0.1",
    "langchain": "^0.3.30",
@@ -65,7 +66,6 @@
    "postcss": "^8",
    "prettier": "^3.2.5",
    "tailwindcss": "^3.3.0",
    "ts-node": "^10.9.2",
    "typescript": "^5"
  }
}
@@ -1,14 +1,23 @@
import crypto from 'crypto';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { EventEmitter } from 'stream';
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
import db from '@/lib/db';
import { chats, messages as messagesSchema } from '@/lib/db/schema';
import { and, eq, gt } from 'drizzle-orm';
import { getFileDetails } from '@/lib/utils/files';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { searchHandlers } from '@/lib/search';
import { z } from 'zod';
import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types';

export const runtime = 'nodejs';
export const dynamic = 'force-dynamic';
@@ -19,30 +28,14 @@ const messageSchema = z.object({
  content: z.string().min(1, 'Message content is required'),
});

const chatModelSchema: z.ZodType<ModelWithProvider> = z.object({
  providerId: z.string({
    errorMap: () => ({
      message: 'Chat model provider id must be provided',
    }),
  }),
  key: z.string({
    errorMap: () => ({
      message: 'Chat model key must be provided',
    }),
  }),
const chatModelSchema = z.object({
  provider: z.string().optional(),
  name: z.string().optional(),
});

const embeddingModelSchema: z.ZodType<ModelWithProvider> = z.object({
  providerId: z.string({
    errorMap: () => ({
      message: 'Embedding model provider id must be provided',
    }),
  }),
  key: z.string({
    errorMap: () => ({
      message: 'Embedding model key must be provided',
    }),
  }),
const embeddingModelSchema = z.object({
  provider: z.string().optional(),
  name: z.string().optional(),
});

const bodySchema = z.object({
@@ -64,8 +57,8 @@ const bodySchema = z.object({
    .optional()
    .default([]),
  files: z.array(z.string()).optional().default([]),
  chatModel: chatModelSchema,
  embeddingModel: embeddingModelSchema,
  chatModel: chatModelSchema.optional().default({}),
  embeddingModel: embeddingModelSchema.optional().default({}),
  systemInstructions: z.string().nullable().optional().default(''),
});
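For reference, a request body that satisfies the restored schema might look like this (a sketch only: the field values are invented, the message object carries more fields than shown here, and both model blocks may be omitted because of the .optional().default({}) fallbacks above):

// Illustrative payload for POST /api/chat under the restored schema.
const exampleBody = {
  message: { content: 'What is Perplexica?' }, // other message fields omitted
  files: [],
  chatModel: { provider: 'openai', name: 'gpt-4o-mini' },
  embeddingModel: { provider: 'openai', name: 'text-embedding-3-small' },
  systemInstructions: '',
};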
@@ -255,16 +248,56 @@ export const POST = async (req: Request) => {
      );
    }

    const registry = new ModelRegistry();

    const [llm, embedding] = await Promise.all([
      registry.loadChatModel(body.chatModel.providerId, body.chatModel.key),
      registry.loadEmbeddingModel(
        body.embeddingModel.providerId,
        body.embeddingModel.key,
      ),
    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ]);

    const chatModelProvider =
      chatModelProviders[
        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
      ];
    const chatModel =
      chatModelProvider[
        body.chatModel?.name || Object.keys(chatModelProvider)[0]
      ];

    const embeddingProvider =
      embeddingModelProviders[
        body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0]
      ];
    const embeddingModel =
      embeddingProvider[
        body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
      ];

    let llm: BaseChatModel | undefined;
    let embedding = embeddingModel.model;

    if (body.chatModel?.provider === 'custom_openai') {
      llm = new ChatOpenAI({
        apiKey: getCustomOpenaiApiKey(),
        modelName: getCustomOpenaiModelName(),
        temperature: 0.7,
        configuration: {
          baseURL: getCustomOpenaiApiUrl(),
        },
      }) as unknown as BaseChatModel;
    } else if (chatModelProvider && chatModel) {
      llm = chatModel.model;
    }

    if (!llm) {
      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
    }

    if (!embedding) {
      return Response.json(
        { error: 'Invalid embedding model' },
        { status: 400 },
      );
    }

    const humanMessageId =
      message.messageId ?? crypto.randomBytes(7).toString('hex');
@@ -1,33 +1,134 @@
import configManager from '@/lib/config';
import ModelRegistry from '@/lib/models/registry';
import { NextRequest, NextResponse } from 'next/server';
import {
  getAnthropicApiKey,
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
  getGeminiApiKey,
  getGroqApiKey,
  getOllamaApiEndpoint,
  getOpenaiApiKey,
  getDeepseekApiKey,
  getAimlApiKey,
  getLMStudioApiEndpoint,
  getLemonadeApiEndpoint,
  getLemonadeApiKey,
  updateConfig,
  getOllamaApiKey,
} from '@/lib/config';
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';

export const GET = async (req: NextRequest) => {
export const GET = async (req: Request) => {
  try {
    const values = configManager.currentConfig;
    const fields = configManager.getUIConfigSections();
    const config: Record<string, any> = {};

    const modelRegistry = new ModelRegistry();
    const modelProviders = await modelRegistry.getActiveProviders();
    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ]);

    values.modelProviders = values.modelProviders.map((mp) => {
      const activeProvider = modelProviders.find((p) => p.id === mp.id);
    config['chatModelProviders'] = {};
    config['embeddingModelProviders'] = {};

      return {
        ...mp,
        chatModels: activeProvider?.chatModels ?? mp.chatModels,
        embeddingModels: activeProvider?.embeddingModels ?? mp.embeddingModels,
      };
    });
    for (const provider in chatModelProviders) {
      config['chatModelProviders'][provider] = Object.keys(
        chatModelProviders[provider],
      ).map((model) => {
        return {
          name: model,
          displayName: chatModelProviders[provider][model].displayName,
        };
      });
    }

    return NextResponse.json({
      values,
      fields,
    });
    for (const provider in embeddingModelProviders) {
      config['embeddingModelProviders'][provider] = Object.keys(
        embeddingModelProviders[provider],
      ).map((model) => {
        return {
          name: model,
          displayName: embeddingModelProviders[provider][model].displayName,
        };
      });
    }

    config['openaiApiKey'] = getOpenaiApiKey();
    config['ollamaApiUrl'] = getOllamaApiEndpoint();
    config['ollamaApiKey'] = getOllamaApiKey();
    config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
    config['lemonadeApiUrl'] = getLemonadeApiEndpoint();
    config['lemonadeApiKey'] = getLemonadeApiKey();
    config['anthropicApiKey'] = getAnthropicApiKey();
    config['groqApiKey'] = getGroqApiKey();
    config['geminiApiKey'] = getGeminiApiKey();
    config['deepseekApiKey'] = getDeepseekApiKey();
    config['aimlApiKey'] = getAimlApiKey();
    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
    config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
    config['customOpenaiModelName'] = getCustomOpenaiModelName();

    return Response.json({ ...config }, { status: 200 });
  } catch (err) {
    console.error('Error in getting config: ', err);
    console.error('An error occurred while getting config:', err);
    return Response.json(
      { message: 'An error has occurred.' },
      { message: 'An error occurred while getting config' },
      { status: 500 },
    );
  }
};

export const POST = async (req: Request) => {
  try {
    const config = await req.json();

    const updatedConfig = {
      MODELS: {
        OPENAI: {
          API_KEY: config.openaiApiKey,
        },
        GROQ: {
          API_KEY: config.groqApiKey,
        },
        ANTHROPIC: {
          API_KEY: config.anthropicApiKey,
        },
        GEMINI: {
          API_KEY: config.geminiApiKey,
        },
        OLLAMA: {
          API_URL: config.ollamaApiUrl,
          API_KEY: config.ollamaApiKey,
        },
        DEEPSEEK: {
          API_KEY: config.deepseekApiKey,
        },
        AIMLAPI: {
          API_KEY: config.aimlApiKey,
        },
        LM_STUDIO: {
          API_URL: config.lmStudioApiUrl,
        },
        LEMONADE: {
          API_URL: config.lemonadeApiUrl,
          API_KEY: config.lemonadeApiKey,
        },
        CUSTOM_OPENAI: {
          API_URL: config.customOpenaiApiUrl,
          API_KEY: config.customOpenaiApiKey,
          MODEL_NAME: config.customOpenaiModelName,
        },
      },
    };

    updateConfig(updatedConfig);

    return Response.json({ message: 'Config updated' }, { status: 200 });
  } catch (err) {
    console.error('An error occurred while updating config:', err);
    return Response.json(
      { message: 'An error occurred while updating config' },
      { status: 500 },
    );
  }
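The restored GET handler flattens everything into one object; a trimmed sketch of the response shape, with placeholder values only:

// Sketch of the GET /api/config payload assembled above. The provider maps
// hold { name, displayName } pairs; every remaining key mirrors one config
// accessor from '@/lib/config'.
const exampleConfigResponse = {
  chatModelProviders: {
    openai: [{ name: 'gpt-4o-mini', displayName: 'GPT-4 omni mini' }],
  },
  embeddingModelProviders: {
    openai: [
      { name: 'text-embedding-3-small', displayName: 'Text Embedding 3 Small' },
    ],
  },
  openaiApiKey: 'sk-placeholder',
  customOpenaiApiUrl: 'http://localhost:8080/v1', // placeholder
  // ...the remaining *ApiKey / *ApiUrl keys follow the same pattern
};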
@@ -1,12 +1,23 @@
import handleImageSearch from '@/lib/chains/imageSearchAgent';
import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

interface ChatModel {
  provider: string;
  model: string;
}

interface ImageSearchBody {
  query: string;
  chatHistory: any[];
  chatModel: ModelWithProvider;
  chatModel?: ChatModel;
}

export const POST = async (req: Request) => {
@@ -23,12 +34,35 @@ export const POST = async (req: Request) => {
    })
    .filter((msg) => msg !== undefined) as BaseMessage[];

  const registry = new ModelRegistry();
  const chatModelProviders = await getAvailableChatModelProviders();

  const llm = await registry.loadChatModel(
    body.chatModel.providerId,
    body.chatModel.key,
  );
  const chatModelProvider =
    chatModelProviders[
      body.chatModel?.provider || Object.keys(chatModelProviders)[0]
    ];
  const chatModel =
    chatModelProvider[
      body.chatModel?.model || Object.keys(chatModelProvider)[0]
    ];

  let llm: BaseChatModel | undefined;

  if (body.chatModel?.provider === 'custom_openai') {
    llm = new ChatOpenAI({
      apiKey: getCustomOpenaiApiKey(),
      modelName: getCustomOpenaiModelName(),
      temperature: 0.7,
      configuration: {
        baseURL: getCustomOpenaiApiUrl(),
      },
    }) as unknown as BaseChatModel;
  } else if (chatModelProvider && chatModel) {
    llm = chatModel.model;
  }

  if (!llm) {
    return Response.json({ error: 'Invalid chat model' }, { status: 400 });
  }

  const images = await handleImageSearch(
    {
src/app/api/models/route.ts: new file, 47 lines
@@ -0,0 +1,47 @@
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';

export const GET = async (req: Request) => {
  try {
    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ]);

    Object.keys(chatModelProviders).forEach((provider) => {
      Object.keys(chatModelProviders[provider]).forEach((model) => {
        delete (chatModelProviders[provider][model] as { model?: unknown })
          .model;
      });
    });

    Object.keys(embeddingModelProviders).forEach((provider) => {
      Object.keys(embeddingModelProviders[provider]).forEach((model) => {
        delete (embeddingModelProviders[provider][model] as { model?: unknown })
          .model;
      });
    });

    return Response.json(
      {
        chatModelProviders,
        embeddingModelProviders,
      },
      {
        status: 200,
      },
    );
  } catch (err) {
    console.error('An error occurred while fetching models', err);
    return Response.json(
      {
        message: 'An error has occurred.',
      },
      {
        status: 500,
      },
    );
  }
};
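A minimal client-side sketch of consuming the new endpoint (hypothetical caller; the response shape follows the route above):

// Lists the available chat model keys per provider. The live model instances
// were deleted server-side, so the payload is plain JSON metadata.
const res = await fetch('/api/models', {
  headers: { 'Content-Type': 'application/json' },
});
const { chatModelProviders } = await res.json();
for (const provider of Object.keys(chatModelProviders)) {
  console.log(provider, Object.keys(chatModelProviders[provider]));
}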
@@ -1,28 +0,0 @@
import ModelRegistry from '@/lib/models/registry';

export const GET = async (req: Request) => {
  try {
    const registry = new ModelRegistry();

    const activeProviders = await registry.getActiveProviders();

    return Response.json(
      {
        providers: activeProviders,
      },
      {
        status: 200,
      },
    );
  } catch (err) {
    console.error('An error occurred while fetching providers', err);
    return Response.json(
      {
        message: 'An error has occurred.',
      },
      {
        status: 500,
      },
    );
  }
};
@@ -1,12 +1,22 @@
import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

interface ChatModel {
  provider: string;
  model: string;
}

interface SuggestionsGenerationBody {
  chatHistory: any[];
  chatModel: ModelWithProvider;
  chatModel?: ChatModel;
}

export const POST = async (req: Request) => {
@@ -23,12 +33,35 @@ export const POST = async (req: Request) => {
    })
    .filter((msg) => msg !== undefined) as BaseMessage[];

  const registry = new ModelRegistry();
  const chatModelProviders = await getAvailableChatModelProviders();

  const llm = await registry.loadChatModel(
    body.chatModel.providerId,
    body.chatModel.key,
  );
  const chatModelProvider =
    chatModelProviders[
      body.chatModel?.provider || Object.keys(chatModelProviders)[0]
    ];
  const chatModel =
    chatModelProvider[
      body.chatModel?.model || Object.keys(chatModelProvider)[0]
    ];

  let llm: BaseChatModel | undefined;

  if (body.chatModel?.provider === 'custom_openai') {
    llm = new ChatOpenAI({
      apiKey: getCustomOpenaiApiKey(),
      modelName: getCustomOpenaiModelName(),
      temperature: 0.7,
      configuration: {
        baseURL: getCustomOpenaiApiUrl(),
      },
    }) as unknown as BaseChatModel;
  } else if (chatModelProvider && chatModel) {
    llm = chatModel.model;
  }

  if (!llm) {
    return Response.json({ error: 'Invalid chat model' }, { status: 400 });
  }

  const suggestions = await generateSuggestions(
    {
@@ -1,12 +1,23 @@
import handleVideoSearch from '@/lib/chains/videoSearchAgent';
import ModelRegistry from '@/lib/models/registry';
import { ModelWithProvider } from '@/lib/models/types';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

interface ChatModel {
  provider: string;
  model: string;
}

interface VideoSearchBody {
  query: string;
  chatHistory: any[];
  chatModel: ModelWithProvider;
  chatModel?: ChatModel;
}

export const POST = async (req: Request) => {
@@ -23,12 +34,35 @@ export const POST = async (req: Request) => {
    })
    .filter((msg) => msg !== undefined) as BaseMessage[];

  const registry = new ModelRegistry();
  const chatModelProviders = await getAvailableChatModelProviders();

  const llm = await registry.loadChatModel(
    body.chatModel.providerId,
    body.chatModel.key,
  );
  const chatModelProvider =
    chatModelProviders[
      body.chatModel?.provider || Object.keys(chatModelProviders)[0]
    ];
  const chatModel =
    chatModelProvider[
      body.chatModel?.model || Object.keys(chatModelProvider)[0]
    ];

  let llm: BaseChatModel | undefined;

  if (body.chatModel?.provider === 'custom_openai') {
    llm = new ChatOpenAI({
      apiKey: getCustomOpenaiApiKey(),
      modelName: getCustomOpenaiModelName(),
      temperature: 0.7,
      configuration: {
        baseURL: getCustomOpenaiApiUrl(),
      },
    }) as unknown as BaseChatModel;
  } else if (chatModelProvider && chatModel) {
    llm = chatModel.model;
  }

  if (!llm) {
    return Response.json({ error: 'Invalid chat model' }, { status: 400 });
  }

  const videos = await handleVideoSearch(
    {
@@ -63,7 +63,7 @@ const Focus = () => {
  <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
    <PopoverButton
      type="button"
      className="active:border-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
      className=" text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
    >
      {focusMode !== 'webSearch' ? (
        <div className="flex flex-row items-center space-x-1">
@@ -33,10 +33,11 @@ const SearchImages = ({
  onClick={async () => {
    setLoading(true);

    const chatModelProvider = localStorage.getItem(
      'chatModelProviderId',
    );
    const chatModel = localStorage.getItem('chatModelKey');
    const chatModelProvider = localStorage.getItem('chatModelProvider');
    const chatModel = localStorage.getItem('chatModel');

    const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
    const customOpenAIKey = localStorage.getItem('openAIApiKey');

    const res = await fetch(`/api/images`, {
      method: 'POST',
@@ -47,8 +48,12 @@
        query: query,
        chatHistory: chatHistory,
        chatModel: {
          providerId: chatModelProvider,
          key: chatModel,
          provider: chatModelProvider,
          model: chatModel,
          ...(chatModelProvider === 'custom_openai' && {
            customOpenAIBaseURL: customOpenAIBaseURL,
            customOpenAIKey: customOpenAIKey,
          }),
        },
      }),
    });
@@ -48,10 +48,11 @@ const Searchvideos = ({
  onClick={async () => {
    setLoading(true);

    const chatModelProvider = localStorage.getItem(
      'chatModelProviderId',
    );
    const chatModel = localStorage.getItem('chatModelKey');
    const chatModelProvider = localStorage.getItem('chatModelProvider');
    const chatModel = localStorage.getItem('chatModel');

    const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
    const customOpenAIKey = localStorage.getItem('openAIApiKey');

    const res = await fetch(`/api/videos`, {
      method: 'POST',
@@ -62,8 +63,12 @@
        query: query,
        chatHistory: chatHistory,
        chatModel: {
          providerId: chatModelProvider,
          key: chatModel,
          provider: chatModelProvider,
          model: chatModel,
          ...(chatModelProvider === 'custom_openai' && {
            customOpenAIBaseURL: customOpenAIBaseURL,
            customOpenAIKey: customOpenAIKey,
          }),
        },
      }),
    });
@@ -70,7 +70,7 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
        </div>
      </div>

      <div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-secondary dark:bg-dark-secondary px-4 py-4 shadow-sm lg:hidden">
      <div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-primary dark:bg-dark-primary px-4 py-4 shadow-sm lg:hidden">
        {navLinks.map((link, i) => (
          <Link
            href={link.href}
@@ -1,13 +0,0 @@
export const register = async () => {
  if (process.env.NEXT_RUNTIME === 'nodejs') {
    try {
      console.log('Running database migrations...');
      await import('./lib/db/migrate');
      console.log('Database migrations completed successfully');
    } catch (error) {
      console.error('Failed to run database migrations:', error);
    }

    await import('./lib/config/index');
  }
};
@@ -1,8 +1,11 @@
import { Message } from '@/components/ChatWindow';

export const getSuggestions = async (chatHistory: Message[]) => {
  const chatModel = localStorage.getItem('chatModelKey');
  const chatModelProvider = localStorage.getItem('chatModelProviderId');
  const chatModel = localStorage.getItem('chatModel');
  const chatModelProvider = localStorage.getItem('chatModelProvider');

  const customOpenAIKey = localStorage.getItem('openAIApiKey');
  const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');

  const res = await fetch(`/api/suggestions`, {
    method: 'POST',
@@ -12,8 +15,12 @@ export const getSuggestions = async (chatHistory: Message[]) => {
    body: JSON.stringify({
      chatHistory: chatHistory,
      chatModel: {
        providerId: chatModelProvider,
        key: chatModel,
        provider: chatModelProvider,
        model: chatModel,
        ...(chatModelProvider === 'custom_openai' && {
          customOpenAIKey,
          customOpenAIBaseURL,
        }),
      },
    }),
  });
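Both implementations read the model selection from localStorage but under different keys; a sketch of the mapping (keys taken verbatim from the code above, registry-era keys on the left):

// Registry-era localStorage keys vs. the keys used after this change.
const keyMap: Record<string, string> = {
  chatModelProviderId: 'chatModelProvider',
  chatModelKey: 'chatModel',
  embeddingModelProviderId: 'embeddingModelProvider',
  embeddingModelKey: 'embeddingModel',
};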
src/lib/config.ts: new file, 158 lines
@@ -0,0 +1,158 @@
import toml from '@iarna/toml';

// Use dynamic imports for Node.js modules to prevent client-side errors
let fs: any;
let path: any;
if (typeof window === 'undefined') {
  // We're on the server
  fs = require('fs');
  path = require('path');
}

const configFileName = 'config.toml';

interface Config {
  GENERAL: {
    SIMILARITY_MEASURE: string;
    KEEP_ALIVE: string;
  };
  MODELS: {
    OPENAI: {
      API_KEY: string;
    };
    GROQ: {
      API_KEY: string;
    };
    ANTHROPIC: {
      API_KEY: string;
    };
    GEMINI: {
      API_KEY: string;
    };
    OLLAMA: {
      API_URL: string;
      API_KEY: string;
    };
    DEEPSEEK: {
      API_KEY: string;
    };
    AIMLAPI: {
      API_KEY: string;
    };
    LM_STUDIO: {
      API_URL: string;
    };
    LEMONADE: {
      API_URL: string;
      API_KEY: string;
    };
    CUSTOM_OPENAI: {
      API_URL: string;
      API_KEY: string;
      MODEL_NAME: string;
    };
  };
  API_ENDPOINTS: {
    SEARXNG: string;
  };
}

type RecursivePartial<T> = {
  [P in keyof T]?: RecursivePartial<T[P]>;
};

const loadConfig = () => {
  // Server-side only
  if (typeof window === 'undefined') {
    return toml.parse(
      fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
    ) as any as Config;
  }

  // Client-side fallback - settings will be loaded via API
  return {} as Config;
};

export const getSimilarityMeasure = () =>
  loadConfig().GENERAL.SIMILARITY_MEASURE;

export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;

export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;

export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;

export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;

export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;

export const getSearxngApiEndpoint = () =>
  process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;

export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;

export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;

export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;

export const getCustomOpenaiApiKey = () =>
  loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

export const getCustomOpenaiApiUrl = () =>
  loadConfig().MODELS.CUSTOM_OPENAI.API_URL;

export const getCustomOpenaiModelName = () =>
  loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;

export const getLMStudioApiEndpoint = () =>
  loadConfig().MODELS.LM_STUDIO.API_URL;

export const getLemonadeApiEndpoint = () =>
  loadConfig().MODELS.LEMONADE.API_URL;

export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY;

const mergeConfigs = (current: any, update: any): any => {
  if (update === null || update === undefined) {
    return current;
  }

  if (typeof current !== 'object' || current === null) {
    return update;
  }

  const result = { ...current };

  for (const key in update) {
    if (Object.prototype.hasOwnProperty.call(update, key)) {
      const updateValue = update[key];

      if (
        typeof updateValue === 'object' &&
        updateValue !== null &&
        typeof result[key] === 'object' &&
        result[key] !== null
      ) {
        result[key] = mergeConfigs(result[key], updateValue);
      } else if (updateValue !== undefined) {
        result[key] = updateValue;
      }
    }
  }

  return result;
};

export const updateConfig = (config: RecursivePartial<Config>) => {
  // Server-side only
  if (typeof window === 'undefined') {
    const currentConfig = loadConfig();
    const mergedConfig = mergeConfigs(currentConfig, config);
    fs.writeFileSync(
      path.join(path.join(process.cwd(), `${configFileName}`)),
      toml.stringify(mergedConfig),
    );
  }
};
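A short usage sketch of the exported helpers (placeholder key value): updateConfig deep-merges a partial patch into config.toml, so sibling sections survive the write.

// Only MODELS.OPENAI.API_KEY changes; every other section of config.toml is
// preserved because mergeConfigs() recurses into nested objects.
updateConfig({
  MODELS: {
    OPENAI: { API_KEY: 'sk-placeholder' }, // hypothetical key
  },
});
console.log(getOpenaiApiKey()); // -> 'sk-placeholder'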
@@ -1,16 +0,0 @@
'use client';

const getClientConfig = (key: string, defaultVal?: any) => {
  return localStorage.getItem(key) ?? defaultVal ?? undefined;
};

export const getTheme = () => getClientConfig('theme', 'dark');

export const getAutoImageSearch = () =>
  Boolean(getClientConfig('autoImageSearch', 'true'));

export const getAutoVideoSearch = () =>
  Boolean(getClientConfig('autoVideoSearch', 'true'));

export const getSystemInstructions = () =>
  getClientConfig('systemInstructions', '');
@@ -1,242 +0,0 @@
import path from 'node:path';
import fs from 'fs';
import { Config, ConfigModelProvider, UIConfigSections } from './types';
import { hashObj } from '../serverUtils';
import { getModelProvidersUIConfigSection } from '../models/providers';

class ConfigManager {
  configPath: string = path.join(
    process.env.DATA_DIR || process.cwd(),
    '/data/config.json',
  );
  configVersion = 1;
  currentConfig: Config = {
    version: this.configVersion,
    setupComplete: false,
    general: {},
    modelProviders: [],
    search: {
      searxngURL: '',
    },
  };
  uiConfigSections: UIConfigSections = {
    general: [],
    modelProviders: [],
    search: [
      {
        name: 'SearXNG URL',
        key: 'searxngURL',
        type: 'string',
        required: false,
        description: 'The URL of your SearXNG instance',
        placeholder: 'http://localhost:4000',
        default: '',
        scope: 'server',
        env: 'SEARXNG_API_URL',
      },
    ],
  };

  constructor() {
    this.initialize();
  }

  private initialize() {
    this.initializeConfig();
    this.initializeFromEnv();
  }

  private saveConfig() {
    fs.writeFileSync(
      this.configPath,
      JSON.stringify(this.currentConfig, null, 2),
    );
  }

  private initializeConfig() {
    const exists = fs.existsSync(this.configPath);
    if (!exists) {
      fs.writeFileSync(
        this.configPath,
        JSON.stringify(this.currentConfig, null, 2),
      );
    } else {
      try {
        this.currentConfig = JSON.parse(
          fs.readFileSync(this.configPath, 'utf-8'),
        );
      } catch (err) {
        if (err instanceof SyntaxError) {
          console.error(
            `Error parsing config file at ${this.configPath}:`,
            err,
          );
          console.log(
            'Loading default config and overwriting the existing file.',
          );
          fs.writeFileSync(
            this.configPath,
            JSON.stringify(this.currentConfig, null, 2),
          );
          return;
        } else {
          console.log('Unknown error reading config file:', err);
        }
      }

      this.currentConfig = this.migrateConfig(this.currentConfig);
    }
  }

  private migrateConfig(config: Config): Config {
    /* TODO: Add migrations */
    return config;
  }

  private initializeFromEnv() {
    /* providers section */
    const providerConfigSections = getModelProvidersUIConfigSection();

    this.uiConfigSections.modelProviders = providerConfigSections;

    const newProviders: ConfigModelProvider[] = [];

    providerConfigSections.forEach((provider) => {
      const newProvider: ConfigModelProvider & { required?: string[] } = {
        id: crypto.randomUUID(),
        name: `${provider.name} ${Math.floor(Math.random() * 1000)}`,
        type: provider.key,
        chatModels: [],
        embeddingModels: [],
        config: {},
        required: [],
        hash: '',
      };

      provider.fields.forEach((field) => {
        newProvider.config[field.key] =
          process.env[field.env!] ||
          field.default ||
          ''; /* Env var must exist for providers */

        if (field.required) newProvider.required?.push(field.key);
      });

      let configured = true;

      newProvider.required?.forEach((r) => {
        if (!newProvider.config[r]) {
          configured = false;
        }
      });

      if (configured) {
        const hash = hashObj(newProvider.config);
        newProvider.hash = hash;
        delete newProvider.required;

        const exists = this.currentConfig.modelProviders.find(
          (p) => p.hash === hash,
        );

        if (!exists) {
          newProviders.push(newProvider);
        }
      }
    });

    this.currentConfig.modelProviders.push(...newProviders);

    /* search section */
    this.uiConfigSections.search.forEach((f) => {
      if (f.env && !this.currentConfig.search[f.key]) {
        this.currentConfig.search[f.key] =
          process.env[f.env] ?? f.default ?? '';
      }
    });

    this.saveConfig();
  }

  public getConfig(key: string, defaultValue?: any): any {
    const nested = key.split('.');
    let obj: any = this.currentConfig;

    for (let i = 0; i < nested.length; i++) {
      const part = nested[i];
      if (obj == null) return defaultValue;

      obj = obj[part];
    }

    return obj === undefined ? defaultValue : obj;
  }

  public updateConfig(key: string, val: any) {
    const parts = key.split('.');
    if (parts.length === 0) return;

    let target: any = this.currentConfig;
    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (target[part] === null || typeof target[part] !== 'object') {
        target[part] = {};
      }

      target = target[part];
    }

    const finalKey = parts[parts.length - 1];
    target[finalKey] = val;

    this.saveConfig();
  }

  public addModelProvider(type: string, name: string, config: any) {
    const newModelProvider: ConfigModelProvider = {
      id: crypto.randomUUID(),
      name,
      type,
      config,
      chatModels: [],
      embeddingModels: [],
      hash: hashObj(config),
    };

    this.currentConfig.modelProviders.push(newModelProvider);
    this.saveConfig();
  }

  public removeModelProvider(id: string) {
    const index = this.currentConfig.modelProviders.findIndex(
      (p) => p.id === id,
    );

    if (index === -1) return;

    this.currentConfig.modelProviders =
      this.currentConfig.modelProviders.filter((p) => p.id !== id);

    this.saveConfig();
  }

  public isSetupComplete() {
    return this.currentConfig.setupComplete;
  }

  public markSetupComplete() {
    if (!this.currentConfig.setupComplete) {
      this.currentConfig.setupComplete = true;
    }

    this.saveConfig();
  }

  public getUIConfigSections(): UIConfigSections {
    return this.uiConfigSections;
  }
}

const configManager = new ConfigManager();

export default configManager;
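For context, the dotted-path accessors of the removed manager worked roughly like this (a sketch):

// getConfig() walks the dotted path and falls back to the default;
// updateConfig() creates missing intermediate objects, then persists the
// whole config to <DATA_DIR>/data/config.json via saveConfig().
configManager.updateConfig('search.searxngURL', 'http://localhost:4000');
configManager.getConfig('search.searxngURL', ''); // -> 'http://localhost:4000'
configManager.getConfig('general.theme', 'dark'); // -> 'dark' (key absent)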
@@ -1,14 +0,0 @@
import configManager from './index';
import { ConfigModelProvider } from './types';

export const getConfiguredModelProviders = (): ConfigModelProvider[] => {
  return configManager.getConfig('modelProviders', []);
};

export const getConfiguredModelProviderById = (
  id: string,
): ConfigModelProvider | undefined => {
  return getConfiguredModelProviders().find((p) => p.id === id) ?? undefined;
};

export const getSearxngURL = () =>
  configManager.getConfig('search.searxngURL', '');
@@ -1,89 +0,0 @@
import { Model } from '../models/types';

type BaseUIConfigField = {
  name: string;
  key: string;
  required: boolean;
  description: string;
  scope: 'client' | 'server';
  env?: string;
};

type StringUIConfigField = BaseUIConfigField & {
  type: 'string';
  placeholder?: string;
  default?: string;
};

type SelectUIConfigFieldOptions = {
  name: string;
  key: string;
  value: string;
};

type SelectUIConfigField = BaseUIConfigField & {
  type: 'select';
  default?: string;
  options: SelectUIConfigFieldOptions[];
};

type PasswordUIConfigField = BaseUIConfigField & {
  type: 'password';
  placeholder?: string;
  default?: string;
};

type UIConfigField =
  | StringUIConfigField
  | SelectUIConfigField
  | PasswordUIConfigField;

type ConfigModelProvider = {
  id: string;
  name: string;
  type: string;
  chatModels: Model[];
  embeddingModels: Model[];
  config: { [key: string]: any };
  hash: string;
};

type Config = {
  version: number;
  setupComplete: boolean;
  general: {
    [key: string]: any;
  };
  modelProviders: ConfigModelProvider[];
  search: {
    [key: string]: any;
  };
};

type EnvMap = {
  [key: string]: {
    fieldKey: string;
    providerKey: string;
  };
};

type ModelProviderUISection = {
  name: string;
  key: string;
  fields: UIConfigField[];
};

type UIConfigSections = {
  general: UIConfigField[];
  modelProviders: ModelProviderUISection[];
  search: UIConfigField[];
};

export type {
  UIConfigField,
  Config,
  EnvMap,
  UIConfigSections,
  ModelProviderUISection,
  ConfigModelProvider,
};
@@ -2,12 +2,9 @@ import Database from 'better-sqlite3';
import path from 'path';
import fs from 'fs';

const DATA_DIR = process.env.DATA_DIR || process.cwd();
const dbPath = path.join(DATA_DIR, './data/db.sqlite');
const db = new Database(path.join(process.cwd(), 'data', 'db.sqlite'));

const db = new Database(dbPath);

const migrationsFolder = path.join(DATA_DIR, 'drizzle');
const migrationsFolder = path.join(process.cwd(), 'drizzle');

db.exec(`
  CREATE TABLE IF NOT EXISTS ran_migrations (
@@ -57,7 +54,7 @@ fs.readdirSync(migrationsFolder)
        id INTEGER PRIMARY KEY,
        type TEXT NOT NULL,
        chatId TEXT NOT NULL,
        createdAt TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
        createdAt TEXT NOT NULL,
        messageId TEXT NOT NULL,
        content TEXT,
        sources TEXT DEFAULT '[]'
@@ -70,10 +67,8 @@ fs.readdirSync(migrationsFolder)
      `);

      messages.forEach((msg: any) => {
        while (typeof msg.metadata === 'string') {
          msg.metadata = JSON.parse(msg.metadata || '{}');
        }
        if (msg.type === 'user') {
          msg.metadata = JSON.parse(msg.metadata || '{}');
          insertMessage.run(
            'user',
            msg.chatId,
@@ -83,6 +78,7 @@
            '[]',
          );
        } else if (msg.type === 'assistant') {
          msg.metadata = JSON.parse(msg.metadata || '{}');
          insertMessage.run(
            'assistant',
            msg.chatId,
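The first hunk swaps two path strategies; a side-by-side sketch of what each resolves to (illustrative paths only):

import path from 'path';

// One side honours a DATA_DIR override (e.g. /srv/perplexica/data/db.sqlite).
const withDataDir = path.join(
  process.env.DATA_DIR || process.cwd(),
  './data/db.sqlite',
);
// The other side always resolves relative to the working directory.
const withCwd = path.join(process.cwd(), 'data', 'db.sqlite');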
@@ -20,7 +20,6 @@ import crypto from 'crypto';
import { useSearchParams } from 'next/navigation';
import { toast } from 'sonner';
import { getSuggestions } from '../actions';
import { MinimalProvider } from '../models/types';

export type Section = {
  userMessage: UserMessage;
@@ -67,13 +66,13 @@ export interface File {
}

interface ChatModelProvider {
  key: string;
  providerId: string;
  name: string;
  provider: string;
}

interface EmbeddingModelProvider {
  key: string;
  providerId: string;
  name: string;
  provider: string;
}

const checkConfig = async (
@@ -83,12 +82,10 @@
  setHasError: (hasError: boolean) => void,
) => {
  try {
    let chatModelKey = localStorage.getItem('chatModelKey');
    let chatModelProviderId = localStorage.getItem('chatModelProviderId');
    let embeddingModelKey = localStorage.getItem('embeddingModelKey');
    let embeddingModelProviderId = localStorage.getItem(
      'embeddingModelProviderId',
    );
    let chatModel = localStorage.getItem('chatModel');
    let chatModelProvider = localStorage.getItem('chatModelProvider');
    let embeddingModel = localStorage.getItem('embeddingModel');
    let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');

    const autoImageSearch = localStorage.getItem('autoImageSearch');
    const autoVideoSearch = localStorage.getItem('autoVideoSearch');
@@ -101,81 +98,145 @@
      localStorage.setItem('autoVideoSearch', 'false');
    }

    const res = await fetch(`/api/providers`, {
    const providers = await fetch(`/api/models`, {
      headers: {
        'Content-Type': 'application/json',
      },
    }).then(async (res) => {
      if (!res.ok)
        throw new Error(
          `Failed to fetch models: ${res.status} ${res.statusText}`,
        );
      return res.json();
    });

    if (!res.ok) {
      throw new Error(
        `Provider fetching failed with status code ${res.status}`,
      );
    if (
      !chatModel ||
      !chatModelProvider ||
      !embeddingModel ||
      !embeddingModelProvider
    ) {
      if (!chatModel || !chatModelProvider) {
        const chatModelProviders = providers.chatModelProviders;
        const chatModelProvidersKeys = Object.keys(chatModelProviders);

        if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
          return toast.error('No chat models available');
        } else {
          chatModelProvider =
            chatModelProvidersKeys.find(
              (provider) =>
                Object.keys(chatModelProviders[provider]).length > 0,
            ) || chatModelProvidersKeys[0];
        }

        if (
          chatModelProvider === 'custom_openai' &&
          Object.keys(chatModelProviders[chatModelProvider]).length === 0
        ) {
          toast.error(
            "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
          );
          return setHasError(true);
        }

        chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
      }

      if (!embeddingModel || !embeddingModelProvider) {
        const embeddingModelProviders = providers.embeddingModelProviders;

        if (
          !embeddingModelProviders ||
          Object.keys(embeddingModelProviders).length === 0
        )
          return toast.error('No embedding models available');

        embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
        embeddingModel = Object.keys(
          embeddingModelProviders[embeddingModelProvider],
        )[0];
      }

      localStorage.setItem('chatModel', chatModel!);
      localStorage.setItem('chatModelProvider', chatModelProvider);
      localStorage.setItem('embeddingModel', embeddingModel!);
      localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
    } else {
      const chatModelProviders = providers.chatModelProviders;
      const embeddingModelProviders = providers.embeddingModelProviders;

      if (
        Object.keys(chatModelProviders).length > 0 &&
        (!chatModelProviders[chatModelProvider] ||
          Object.keys(chatModelProviders[chatModelProvider]).length === 0)
      ) {
        const chatModelProvidersKeys = Object.keys(chatModelProviders);
        chatModelProvider =
          chatModelProvidersKeys.find(
            (key) => Object.keys(chatModelProviders[key]).length > 0,
          ) || chatModelProvidersKeys[0];

        localStorage.setItem('chatModelProvider', chatModelProvider);
      }

      if (
        chatModelProvider &&
        !chatModelProviders[chatModelProvider][chatModel]
      ) {
        if (
          chatModelProvider === 'custom_openai' &&
          Object.keys(chatModelProviders[chatModelProvider]).length === 0
        ) {
          toast.error(
            "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
          );
          return setHasError(true);
        }

        chatModel = Object.keys(
          chatModelProviders[
            Object.keys(chatModelProviders[chatModelProvider]).length > 0
              ? chatModelProvider
              : Object.keys(chatModelProviders)[0]
          ],
        )[0];

        localStorage.setItem('chatModel', chatModel);
      }

      if (
        Object.keys(embeddingModelProviders).length > 0 &&
        !embeddingModelProviders[embeddingModelProvider]
      ) {
        embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
        localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
      }

      if (
        embeddingModelProvider &&
        !embeddingModelProviders[embeddingModelProvider][embeddingModel]
      ) {
        embeddingModel = Object.keys(
          embeddingModelProviders[embeddingModelProvider],
        )[0];
        localStorage.setItem('embeddingModel', embeddingModel);
      }
    }

    const data = await res.json();
    const providers: MinimalProvider[] = data.providers;

    if (providers.length === 0) {
      throw new Error(
        'No chat model providers found, please configure them in the settings page.',
      );
    }

    const chatModelProvider =
      providers.find((p) => p.id === chatModelProviderId) ??
      providers.find((p) => p.chatModels.length > 0);

    if (!chatModelProvider) {
      throw new Error(
        'No chat models found, please configure them in the settings page.',
      );
    }

    chatModelProviderId = chatModelProvider.id;

    const chatModel =
      chatModelProvider.chatModels.find((m) => m.key === chatModelKey) ??
      chatModelProvider.chatModels[0];
    chatModelKey = chatModel.key;

    const embeddingModelProvider =
      providers.find((p) => p.id === embeddingModelProviderId) ??
      providers.find((p) => p.embeddingModels.length > 0);

    if (!embeddingModelProvider) {
      throw new Error(
        'No embedding models found, please configure them in the settings page.',
      );
    }

    embeddingModelProviderId = embeddingModelProvider.id;

    const embeddingModel =
      embeddingModelProvider.embeddingModels.find(
        (m) => m.key === embeddingModelKey,
      ) ?? embeddingModelProvider.embeddingModels[0];
    embeddingModelKey = embeddingModel.key;

    localStorage.setItem('chatModelKey', chatModelKey);
    localStorage.setItem('chatModelProviderId', chatModelProviderId);
    localStorage.setItem('embeddingModelKey', embeddingModelKey);
    localStorage.setItem('embeddingModelProviderId', embeddingModelProviderId);

    setChatModelProvider({
      key: chatModelKey,
      providerId: chatModelProviderId,
      name: chatModel!,
      provider: chatModelProvider,
    });

    setEmbeddingModelProvider({
      key: embeddingModelKey,
      providerId: embeddingModelProviderId,
      name: embeddingModel!,
      provider: embeddingModelProvider,
    });

    setIsConfigReady(true);
  } catch (err: any) {
  } catch (err) {
    console.error('An error occurred while checking the configuration:', err);
    toast.error(err.message);
    setIsConfigReady(false);
    setHasError(true);
  }
@@ -295,15 +356,15 @@ export const ChatProvider = ({

  const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
    {
      key: '',
      providerId: '',
      name: '',
      provider: '',
    },
  );

  const [embeddingModelProvider, setEmbeddingModelProvider] =
    useState<EmbeddingModelProvider>({
      key: '',
      providerId: '',
      name: '',
      provider: '',
    });

  const [isConfigReady, setIsConfigReady] = useState(false);
@@ -681,12 +742,12 @@ export const ChatProvider = ({
        ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
        : chatHistory,
      chatModel: {
        key: chatModelProvider.key,
        providerId: chatModelProvider.providerId,
        name: chatModelProvider.name,
        provider: chatModelProvider.provider,
      },
      embeddingModel: {
        key: embeddingModelProvider.key,
        providerId: embeddingModelProvider.providerId,
        name: embeddingModelProvider.name,
        provider: embeddingModelProvider.provider,
      },
      systemInstructions: localStorage.getItem('systemInstructions'),
    }),
@@ -1,45 +0,0 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import { UIConfigField } from '@/lib/config/types';

abstract class BaseModelProvider<CONFIG> {
  constructor(
    protected id: string,
    protected name: string,
    protected config: CONFIG,
  ) {}

  abstract getDefaultModels(): Promise<ModelList>;
  abstract getModelList(): Promise<ModelList>;
  abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
  abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;

  static getProviderConfigFields(): UIConfigField[] {
    throw new Error('Method not implemented.');
  }

  static getProviderMetadata(): ProviderMetadata {
    throw new Error('Method not implemented.');
  }

  static parseAndValidate(raw: any): any {
    /* Static methods can't access class type parameters */
    throw new Error('Method not implemented.');
  }
}

export type ProviderConstructor<CONFIG> = {
  new (id: string, name: string, config: CONFIG): BaseModelProvider<CONFIG>;
  parseAndValidate(raw: any): CONFIG;
  getProviderConfigFields: () => UIConfigField[];
  getProviderMetadata: () => ProviderMetadata;
};

export const createProviderInstance = <P extends ProviderConstructor<any>>(
  Provider: P,
  id: string,
  name: string,
  rawConfig: unknown,
): InstanceType<P> => {
  const cfg = Provider.parseAndValidate(rawConfig);
  return new Provider(id, name, cfg) as InstanceType<P>;
};

export default BaseModelProvider;
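For context, a usage sketch of the removed factory, paired with the OpenAIProvider deleted later in this diff (the id and config values are hypothetical):

// parseAndValidate() narrows the raw config before construction, so the
// returned instance is fully typed.
const provider = createProviderInstance(OpenAIProvider, 'openai-default', 'OpenAI', {
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: 'https://api.openai.com/v1',
});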
@@ -1,21 +0,0 @@
import { ModelProviderUISection } from '@/lib/config/types';
import { ProviderConstructor } from './baseProvider';
import OpenAIProvider from './openai';

export const providers: Record<string, ProviderConstructor<any>> = {
  openai: OpenAIProvider,
};

export const getModelProvidersUIConfigSection =
  (): ModelProviderUISection[] => {
    return Object.entries(providers).map(([k, p]) => {
      const configFields = p.getProviderConfigFields();
      const metadata = p.getProviderMetadata();

      return {
        fields: configFields,
        key: k,
        name: metadata.name,
      };
    });
  };
@@ -1,214 +0,0 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface OpenAIConfig {
  apiKey: string;
  baseURL: string;
}

const defaultChatModels: Model[] = [
  {
    name: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    name: 'GPT-4',
    key: 'gpt-4',
  },
  {
    name: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    name: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    name: 'GPT-4o (2024-05-13)',
    key: 'gpt-4o-2024-05-13',
  },
  {
    name: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
  {
    name: 'GPT 4.1 nano',
    key: 'gpt-4.1-nano',
  },
  {
    name: 'GPT 4.1 mini',
    key: 'gpt-4.1-mini',
  },
  {
    name: 'GPT 4.1',
    key: 'gpt-4.1',
  },
  {
    name: 'GPT 5 nano',
    key: 'gpt-5-nano',
  },
  {
    name: 'GPT 5',
    key: 'gpt-5',
  },
  {
    name: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    name: 'o1',
    key: 'o1',
  },
  {
    name: 'o3',
    key: 'o3',
  },
  {
    name: 'o3 Mini',
    key: 'o3-mini',
  },
  {
    name: 'o4 Mini',
    key: 'o4-mini',
  },
];

const defaultEmbeddingModels: Model[] = [
  {
    name: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    name: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your OpenAI API key',
    required: true,
    placeholder: 'OpenAI API Key',
    env: 'OPENAI_API_KEY',
    scope: 'server',
  },
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for the OpenAI API',
    required: true,
    placeholder: 'OpenAI Base URL',
    default: 'https://api.openai.com/v1',
    env: 'OPENAI_BASE_URL',
    scope: 'server',
  },
];

class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
  constructor(id: string, name: string, config: OpenAIConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    if (this.config.baseURL === 'https://api.openai.com/v1') {
      return {
        embedding: defaultEmbeddingModels,
        chat: defaultChatModels,
      };
    }

    return {
      embedding: [],
      chat: [],
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading OpenAI Chat Model. Invalid Model Selected',
      );
    }

    return new ChatOpenAI({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
      );
    }

    return new OpenAIEmbeddings({
      apiKey: this.config.apiKey,
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  static parseAndValidate(raw: any): OpenAIConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey || !raw.baseURL)
      throw new Error(
        'Invalid config provided. API key and base URL must be provided',
      );

    return {
      apiKey: String(raw.apiKey),
      baseURL: String(raw.baseURL),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'openai',
      name: 'OpenAI',
    };
  }
}

export default OpenAIProvider;
@@ -1,78 +0,0 @@
import { ConfigModelProvider } from '../config/types';
import BaseModelProvider, {
  createProviderInstance,
} from './providers/baseProvider';
import { getConfiguredModelProviders } from '../config/serverRegistry';
import { providers } from './providers';
import { MinimalProvider, Model } from './types';

class ModelRegistry {
  activeProviders: (ConfigModelProvider & {
    provider: BaseModelProvider<any>;
  })[] = [];

  constructor() {
    this.initializeActiveProviders();
  }

  private initializeActiveProviders() {
    const configuredProviders = getConfiguredModelProviders();

    configuredProviders.forEach((p) => {
      try {
        const provider = providers[p.type];
        if (!provider) throw new Error('Invalid provider type');

        this.activeProviders.push({
          ...p,
          provider: createProviderInstance(provider, p.id, p.name, p.config),
        });
      } catch (err) {
        console.error(
          `Failed to initialize provider. Type: ${p.type}, ID: ${p.id}, Config: ${JSON.stringify(p.config)}, Error: ${err}`,
        );
      }
    });
  }

  async getActiveProviders() {
    const providers: MinimalProvider[] = [];

    await Promise.all(
      this.activeProviders.map(async (p) => {
        const m = await p.provider.getModelList();

        providers.push({
          id: p.id,
          name: p.name,
          chatModels: m.chat,
          embeddingModels: m.embedding,
        });
      }),
    );

    return providers;
  }

  async loadChatModel(providerId: string, modelName: string) {
    const provider = this.activeProviders.find((p) => p.id === providerId);

    if (!provider) throw new Error('Invalid provider id');

    const model = await provider.provider.loadChatModel(modelName);

    return model;
  }

  async loadEmbeddingModel(providerId: string, modelName: string) {
    const provider = this.activeProviders.find((p) => p.id === providerId);

    if (!provider) throw new Error('Invalid provider id');

    const model = await provider.provider.loadEmbeddingModel(modelName);

    return model;
  }
}

export default ModelRegistry;
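For context, a sketch of how the removed ModelRegistry was consumed. The provider ID and model key are hypothetical; real IDs come from the configured providers:

// Hypothetical usage, assuming a configured provider with ID
// 'openai-default' that exposes 'gpt-4o-mini'.
const registry = new ModelRegistry();

const demo = async () => {
  const available = await registry.getActiveProviders();
  console.log(available.map((p) => p.name));

  // Returns a LangChain BaseChatModel ready for .invoke() / .stream().
  const chatModel = await registry.loadChatModel('openai-default', 'gpt-4o-mini');
};

demo();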
@@ -1,34 +0,0 @@
type Model = {
  name: string;
  key: string;
};

type ModelList = {
  embedding: Model[];
  chat: Model[];
};

type ProviderMetadata = {
  name: string;
  key: string;
};

type MinimalProvider = {
  id: string;
  name: string;
  chatModels: Model[];
  embeddingModels: Model[];
};

type ModelWithProvider = {
  key: string;
  providerId: string;
};

export type {
  Model,
  ModelList,
  ProviderMetadata,
  MinimalProvider,
  ModelWithProvider,
};
94
src/lib/providers/aimlapi.ts
Normal file
@@ -0,0 +1,94 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getAimlApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import axios from 'axios';

export const PROVIDER_INFO = {
  key: 'aimlapi',
  displayName: 'AI/ML API',
};

interface AimlApiModel {
  id: string;
  name?: string;
  type?: string;
}

const API_URL = 'https://api.aimlapi.com';

export const loadAimlApiChatModels = async () => {
  const apiKey = getAimlApiKey();

  if (!apiKey) return {};

  try {
    const response = await axios.get(`${API_URL}/models`, {
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${apiKey}`,
      },
    });

    const chatModels: Record<string, ChatModel> = {};

    response.data.data.forEach((model: AimlApiModel) => {
      if (model.type === 'chat-completion') {
        chatModels[model.id] = {
          displayName: model.name || model.id,
          model: new ChatOpenAI({
            apiKey: apiKey,
            modelName: model.id,
            temperature: 0.7,
            configuration: {
              baseURL: API_URL,
            },
          }) as unknown as BaseChatModel,
        };
      }
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading AI/ML API models: ${err}`);
    return {};
  }
};

export const loadAimlApiEmbeddingModels = async () => {
  const apiKey = getAimlApiKey();

  if (!apiKey) return {};

  try {
    const response = await axios.get(`${API_URL}/models`, {
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${apiKey}`,
      },
    });

    const embeddingModels: Record<string, EmbeddingModel> = {};

    response.data.data.forEach((model: AimlApiModel) => {
      if (model.type === 'embedding') {
        embeddingModels[model.id] = {
          displayName: model.name || model.id,
          model: new OpenAIEmbeddings({
            apiKey: apiKey,
            modelName: model.id,
            configuration: {
              baseURL: API_URL,
            },
          }) as unknown as Embeddings,
        };
      }
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading AI/ML API embeddings models: ${err}`);
    return {};
  }
};
78
src/lib/providers/anthropic.ts
Normal file
@@ -0,0 +1,78 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatModel } from '.';
import { getAnthropicApiKey } from '../config';

export const PROVIDER_INFO = {
  key: 'anthropic',
  displayName: 'Anthropic',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

const anthropicChatModels: Record<string, string>[] = [
  {
    displayName: 'Claude 4.1 Opus',
    key: 'claude-opus-4-1-20250805',
  },
  {
    displayName: 'Claude 4 Opus',
    key: 'claude-opus-4-20250514',
  },
  {
    displayName: 'Claude 4 Sonnet',
    key: 'claude-sonnet-4-20250514',
  },
  {
    displayName: 'Claude 3.7 Sonnet',
    key: 'claude-3-7-sonnet-20250219',
  },
  {
    displayName: 'Claude 3.5 Haiku',
    key: 'claude-3-5-haiku-20241022',
  },
  {
    displayName: 'Claude 3.5 Sonnet v2',
    key: 'claude-3-5-sonnet-20241022',
  },
  {
    displayName: 'Claude 3.5 Sonnet',
    key: 'claude-3-5-sonnet-20240620',
  },
  {
    displayName: 'Claude 3 Opus',
    key: 'claude-3-opus-20240229',
  },
  {
    displayName: 'Claude 3 Sonnet',
    key: 'claude-3-sonnet-20240229',
  },
  {
    displayName: 'Claude 3 Haiku',
    key: 'claude-3-haiku-20240307',
  },
];

export const loadAnthropicChatModels = async () => {
  const anthropicApiKey = getAnthropicApiKey();

  if (!anthropicApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    anthropicChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatAnthropic({
          apiKey: anthropicApiKey,
          modelName: model.key,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Anthropic models: ${err}`);
    return {};
  }
};
49
src/lib/providers/deepseek.ts
Normal file
@@ -0,0 +1,49 @@
import { ChatOpenAI } from '@langchain/openai';
import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

export const PROVIDER_INFO = {
  key: 'deepseek',
  displayName: 'Deepseek AI',
};

const deepseekChatModels: Record<string, string>[] = [
  {
    displayName: 'Deepseek Chat (Deepseek V3)',
    key: 'deepseek-chat',
  },
  {
    displayName: 'Deepseek Reasoner (Deepseek R1)',
    key: 'deepseek-reasoner',
  },
];

export const loadDeepseekChatModels = async () => {
  const deepseekApiKey = getDeepseekApiKey();

  if (!deepseekApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    deepseekChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatOpenAI({
          apiKey: deepseekApiKey,
          modelName: model.key,
          temperature: 0.7,
          configuration: {
            baseURL: 'https://api.deepseek.com',
          },
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Deepseek models: ${err}`);
    return {};
  }
};
114
src/lib/providers/gemini.ts
Normal file
@@ -0,0 +1,114 @@
import {
  ChatGoogleGenerativeAI,
  GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { getGeminiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'gemini',
  displayName: 'Google Gemini',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

const geminiChatModels: Record<string, string>[] = [
  {
    displayName: 'Gemini 2.5 Flash',
    key: 'gemini-2.5-flash',
  },
  {
    displayName: 'Gemini 2.5 Flash-Lite',
    key: 'gemini-2.5-flash-lite',
  },
  {
    displayName: 'Gemini 2.5 Pro',
    key: 'gemini-2.5-pro',
  },
  {
    displayName: 'Gemini 2.0 Flash',
    key: 'gemini-2.0-flash',
  },
  {
    displayName: 'Gemini 2.0 Flash-Lite',
    key: 'gemini-2.0-flash-lite',
  },
  {
    displayName: 'Gemini 2.0 Flash Thinking Experimental',
    key: 'gemini-2.0-flash-thinking-exp-01-21',
  },
  {
    displayName: 'Gemini 1.5 Flash',
    key: 'gemini-1.5-flash',
  },
  {
    displayName: 'Gemini 1.5 Flash-8B',
    key: 'gemini-1.5-flash-8b',
  },
  {
    displayName: 'Gemini 1.5 Pro',
    key: 'gemini-1.5-pro',
  },
];

const geminiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 004',
    key: 'models/text-embedding-004',
  },
  {
    displayName: 'Embedding 001',
    key: 'models/embedding-001',
  },
];

export const loadGeminiChatModels = async () => {
  const geminiApiKey = getGeminiApiKey();

  if (!geminiApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    geminiChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatGoogleGenerativeAI({
          apiKey: geminiApiKey,
          model: model.key,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Gemini models: ${err}`);
    return {};
  }
};

export const loadGeminiEmbeddingModels = async () => {
  const geminiApiKey = getGeminiApiKey();

  if (!geminiApiKey) return {};

  try {
    const embeddingModels: Record<string, EmbeddingModel> = {};

    geminiEmbeddingModels.forEach((model) => {
      embeddingModels[model.key] = {
        displayName: model.displayName,
        model: new GoogleGenerativeAIEmbeddings({
          apiKey: geminiApiKey,
          modelName: model.key,
        }) as unknown as Embeddings,
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Gemini embeddings models: ${err}`);
    return {};
  }
};
44
src/lib/providers/groq.ts
Normal file
@@ -0,0 +1,44 @@
import { ChatGroq } from '@langchain/groq';
import { getGroqApiKey } from '../config';
import { ChatModel } from '.';

export const PROVIDER_INFO = {
  key: 'groq',
  displayName: 'Groq',
};

import { BaseChatModel } from '@langchain/core/language_models/chat_models';

export const loadGroqChatModels = async () => {
  const groqApiKey = getGroqApiKey();
  if (!groqApiKey) return {};

  try {
    const res = await fetch('https://api.groq.com/openai/v1/models', {
      method: 'GET',
      headers: {
        Authorization: `bearer ${groqApiKey}`,
        'Content-Type': 'application/json',
      },
    });

    const groqChatModels = (await res.json()).data;
    const chatModels: Record<string, ChatModel> = {};

    groqChatModels.forEach((model: any) => {
      chatModels[model.id] = {
        displayName: model.id,
        model: new ChatGroq({
          apiKey: groqApiKey,
          model: model.id,
          temperature: 0.7,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Groq models: ${err}`);
    return {};
  }
};
170
src/lib/providers/index.ts
Normal file
@@ -0,0 +1,170 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import {
  loadOpenAIChatModels,
  loadOpenAIEmbeddingModels,
  PROVIDER_INFO as OpenAIInfo,
  PROVIDER_INFO,
} from './openai';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '../config';
import { ChatOpenAI } from '@langchain/openai';
import {
  loadOllamaChatModels,
  loadOllamaEmbeddingModels,
  PROVIDER_INFO as OllamaInfo,
} from './ollama';
import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
import {
  loadAnthropicChatModels,
  PROVIDER_INFO as AnthropicInfo,
} from './anthropic';
import {
  loadGeminiChatModels,
  loadGeminiEmbeddingModels,
  PROVIDER_INFO as GeminiInfo,
} from './gemini';
import {
  loadTransformersEmbeddingsModels,
  PROVIDER_INFO as TransformersInfo,
} from './transformers';
import {
  loadDeepseekChatModels,
  PROVIDER_INFO as DeepseekInfo,
} from './deepseek';
import {
  loadAimlApiChatModels,
  loadAimlApiEmbeddingModels,
  PROVIDER_INFO as AimlApiInfo,
} from './aimlapi';
import {
  loadLMStudioChatModels,
  loadLMStudioEmbeddingsModels,
  PROVIDER_INFO as LMStudioInfo,
} from './lmstudio';
import {
  loadLemonadeChatModels,
  loadLemonadeEmbeddingModels,
  PROVIDER_INFO as LemonadeInfo,
} from './lemonade';

export const PROVIDER_METADATA = {
  openai: OpenAIInfo,
  ollama: OllamaInfo,
  groq: GroqInfo,
  anthropic: AnthropicInfo,
  gemini: GeminiInfo,
  transformers: TransformersInfo,
  deepseek: DeepseekInfo,
  aimlapi: AimlApiInfo,
  lmstudio: LMStudioInfo,
  lemonade: LemonadeInfo,
  custom_openai: {
    key: 'custom_openai',
    displayName: 'Custom OpenAI',
  },
};

export interface ChatModel {
  displayName: string;
  model: BaseChatModel;
}

export interface EmbeddingModel {
  displayName: string;
  model: Embeddings;
}

export const chatModelProviders: Record<
  string,
  () => Promise<Record<string, ChatModel>>
> = {
  openai: loadOpenAIChatModels,
  ollama: loadOllamaChatModels,
  groq: loadGroqChatModels,
  anthropic: loadAnthropicChatModels,
  gemini: loadGeminiChatModels,
  deepseek: loadDeepseekChatModels,
  aimlapi: loadAimlApiChatModels,
  lmstudio: loadLMStudioChatModels,
  lemonade: loadLemonadeChatModels,
};

export const embeddingModelProviders: Record<
  string,
  () => Promise<Record<string, EmbeddingModel>>
> = {
  openai: loadOpenAIEmbeddingModels,
  ollama: loadOllamaEmbeddingModels,
  gemini: loadGeminiEmbeddingModels,
  transformers: loadTransformersEmbeddingsModels,
  aimlapi: loadAimlApiEmbeddingModels,
  lmstudio: loadLMStudioEmbeddingsModels,
  lemonade: loadLemonadeEmbeddingModels,
};

export const getAvailableChatModelProviders = async () => {
  const models: Record<string, Record<string, ChatModel>> = {};

  for (const provider in chatModelProviders) {
    const providerModels = await chatModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  const customOpenAiApiKey = getCustomOpenaiApiKey();
  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
  const customOpenAiModelName = getCustomOpenaiModelName();

  models['custom_openai'] = {
    ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
      ? {
          [customOpenAiModelName]: {
            displayName: customOpenAiModelName,
            model: new ChatOpenAI({
              apiKey: customOpenAiApiKey,
              modelName: customOpenAiModelName,
              ...(() => {
                const temperatureRestrictedModels = [
                  'gpt-5-nano',
                  'gpt-5',
                  'gpt-5-mini',
                  'o1',
                  'o3',
                  'o3-mini',
                  'o4-mini',
                ];
                const isTemperatureRestricted =
                  temperatureRestrictedModels.some((restrictedModel) =>
                    customOpenAiModelName.includes(restrictedModel),
                  );
                return isTemperatureRestricted ? {} : { temperature: 0.7 };
              })(),
              configuration: {
                baseURL: customOpenAiApiUrl,
              },
            }) as unknown as BaseChatModel,
          },
        }
      : {}),
  };

  return models;
};

export const getAvailableEmbeddingModelProviders = async () => {
  const models: Record<string, Record<string, EmbeddingModel>> = {};

  for (const provider in embeddingModelProviders) {
    const providerModels = await embeddingModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  return models;
};
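A sketch of consuming the restored loader map above. Providers with no API key or endpoint configured return {} and are filtered out, so iterating the result only touches usable providers (the import alias is assumed from the repo's path config):

import { getAvailableChatModelProviders } from '@/lib/providers';

// Enumerate every provider/model pair the loaders discovered.
const listModels = async () => {
  const providers = await getAvailableChatModelProviders();
  for (const [provider, models] of Object.entries(providers)) {
    for (const [key, { displayName }] of Object.entries(models)) {
      console.log(`${provider} -> ${key} (${displayName})`);
    }
  }
};

listModels();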
94
src/lib/providers/lemonade.ts
Normal file
@@ -0,0 +1,94 @@
import axios from 'axios';
import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'lemonade',
  displayName: 'Lemonade',
};

import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';

export const loadLemonadeChatModels = async () => {
  const lemonadeApiEndpoint = getLemonadeApiEndpoint();
  const lemonadeApiKey = getLemonadeApiKey();

  if (!lemonadeApiEndpoint) return {};

  try {
    const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
      headers: {
        'Content-Type': 'application/json',
        ...(lemonadeApiKey
          ? { Authorization: `Bearer ${lemonadeApiKey}` }
          : {}),
      },
    });

    const { data: models } = res.data;

    const chatModels: Record<string, ChatModel> = {};

    models.forEach((model: any) => {
      chatModels[model.id] = {
        displayName: model.id,
        model: new ChatOpenAI({
          apiKey: lemonadeApiKey || 'lemonade-key',
          modelName: model.id,
          temperature: 0.7,
          configuration: {
            baseURL: `${lemonadeApiEndpoint}/api/v1`,
          },
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Lemonade models: ${err}`);
    return {};
  }
};

export const loadLemonadeEmbeddingModels = async () => {
  const lemonadeApiEndpoint = getLemonadeApiEndpoint();
  const lemonadeApiKey = getLemonadeApiKey();

  if (!lemonadeApiEndpoint) return {};

  try {
    const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
      headers: {
        'Content-Type': 'application/json',
        ...(lemonadeApiKey
          ? { Authorization: `Bearer ${lemonadeApiKey}` }
          : {}),
      },
    });

    const { data: models } = res.data;

    const embeddingModels: Record<string, EmbeddingModel> = {};

    // Filter models that support embeddings (if Lemonade provides this info)
    // For now, we'll assume all models can be used for embeddings
    models.forEach((model: any) => {
      embeddingModels[model.id] = {
        displayName: model.id,
        model: new OpenAIEmbeddings({
          apiKey: lemonadeApiKey || 'lemonade-key',
          modelName: model.id,
          configuration: {
            baseURL: `${lemonadeApiEndpoint}/api/v1`,
          },
        }),
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Lemonade embedding models: ${err}`);
    return {};
  }
};
100
src/lib/providers/lmstudio.ts
Normal file
@@ -0,0 +1,100 @@
import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
import axios from 'axios';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'lmstudio',
  displayName: 'LM Studio',
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

interface LMStudioModel {
  id: string;
  name?: string;
}

const ensureV1Endpoint = (endpoint: string): string =>
  endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;

const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
  try {
    await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });
    return true;
  } catch {
    return false;
  }
};

export const loadLMStudioChatModels = async () => {
  const endpoint = getLMStudioApiEndpoint();

  if (!endpoint) return {};
  if (!(await checkServerAvailability(endpoint))) return {};

  try {
    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });

    const chatModels: Record<string, ChatModel> = {};

    response.data.data.forEach((model: LMStudioModel) => {
      chatModels[model.id] = {
        displayName: model.name || model.id,
        model: new ChatOpenAI({
          apiKey: 'lm-studio',
          configuration: {
            baseURL: ensureV1Endpoint(endpoint),
          },
          modelName: model.id,
          temperature: 0.7,
          streaming: true,
          maxRetries: 3,
        }) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading LM Studio models: ${err}`);
    return {};
  }
};

export const loadLMStudioEmbeddingsModels = async () => {
  const endpoint = getLMStudioApiEndpoint();

  if (!endpoint) return {};
  if (!(await checkServerAvailability(endpoint))) return {};

  try {
    const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
      headers: { 'Content-Type': 'application/json' },
    });

    const embeddingsModels: Record<string, EmbeddingModel> = {};

    response.data.data.forEach((model: LMStudioModel) => {
      embeddingsModels[model.id] = {
        displayName: model.name || model.id,
        model: new OpenAIEmbeddings({
          apiKey: 'lm-studio',
          configuration: {
            baseURL: ensureV1Endpoint(endpoint),
          },
          modelName: model.id,
        }) as unknown as Embeddings,
      };
    });

    return embeddingsModels;
  } catch (err) {
    console.error(`Error loading LM Studio embeddings model: ${err}`);
    return {};
  }
};
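A note on ensureV1Endpoint above: it makes the configured LM Studio address tolerant of a trailing /v1, so both spellings resolve to the same OpenAI-compatible base URL (example addresses are hypothetical):

ensureV1Endpoint('http://localhost:1234');    // 'http://localhost:1234/v1'
ensureV1Endpoint('http://localhost:1234/v1'); // 'http://localhost:1234/v1'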
86
src/lib/providers/ollama.ts
Normal file
@@ -0,0 +1,86 @@
import axios from 'axios';
import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'ollama',
  displayName: 'Ollama',
};
import { ChatOllama } from '@langchain/ollama';
import { OllamaEmbeddings } from '@langchain/ollama';

export const loadOllamaChatModels = async () => {
  const ollamaApiEndpoint = getOllamaApiEndpoint();
  const ollamaApiKey = getOllamaApiKey();

  if (!ollamaApiEndpoint) return {};

  try {
    const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
      headers: {
        'Content-Type': 'application/json',
      },
    });

    const { models } = res.data;

    const chatModels: Record<string, ChatModel> = {};

    models.forEach((model: any) => {
      chatModels[model.model] = {
        displayName: model.name,
        model: new ChatOllama({
          baseUrl: ollamaApiEndpoint,
          model: model.model,
          temperature: 0.7,
          keepAlive: getKeepAlive(),
          ...(ollamaApiKey
            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
            : {}),
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Ollama models: ${err}`);
    return {};
  }
};

export const loadOllamaEmbeddingModels = async () => {
  const ollamaApiEndpoint = getOllamaApiEndpoint();
  const ollamaApiKey = getOllamaApiKey();

  if (!ollamaApiEndpoint) return {};

  try {
    const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
      headers: {
        'Content-Type': 'application/json',
      },
    });

    const { models } = res.data;

    const embeddingModels: Record<string, EmbeddingModel> = {};

    models.forEach((model: any) => {
      embeddingModels[model.model] = {
        displayName: model.name,
        model: new OllamaEmbeddings({
          baseUrl: ollamaApiEndpoint,
          model: model.model,
          ...(ollamaApiKey
            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
            : {}),
        }),
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Ollama embeddings models: ${err}`);
    return {};
  }
};
159
src/lib/providers/openai.ts
Normal file
@@ -0,0 +1,159 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getOpenaiApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';

export const PROVIDER_INFO = {
  key: 'openai',
  displayName: 'OpenAI',
};
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';

const openaiChatModels: Record<string, string>[] = [
  {
    displayName: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    displayName: 'GPT-4',
    key: 'gpt-4',
  },
  {
    displayName: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    displayName: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    displayName: 'GPT-4o (2024-05-13)',
    key: 'gpt-4o-2024-05-13',
  },
  {
    displayName: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
  {
    displayName: 'GPT 4.1 nano',
    key: 'gpt-4.1-nano',
  },
  {
    displayName: 'GPT 4.1 mini',
    key: 'gpt-4.1-mini',
  },
  {
    displayName: 'GPT 4.1',
    key: 'gpt-4.1',
  },
  {
    displayName: 'GPT 5 nano',
    key: 'gpt-5-nano',
  },
  {
    displayName: 'GPT 5',
    key: 'gpt-5',
  },
  {
    displayName: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    displayName: 'o1',
    key: 'o1',
  },
  {
    displayName: 'o3',
    key: 'o3',
  },
  {
    displayName: 'o3 Mini',
    key: 'o3-mini',
  },
  {
    displayName: 'o4 Mini',
    key: 'o4-mini',
  },
];

const openaiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    displayName: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];

export const loadOpenAIChatModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const chatModels: Record<string, ChatModel> = {};

    openaiChatModels.forEach((model) => {
      // Models that only support temperature = 1
      const temperatureRestrictedModels = [
        'gpt-5-nano',
        'gpt-5',
        'gpt-5-mini',
        'o1',
        'o3',
        'o3-mini',
        'o4-mini',
      ];
      const isTemperatureRestricted = temperatureRestrictedModels.some(
        (restrictedModel) => model.key.includes(restrictedModel),
      );

      const modelConfig: any = {
        apiKey: openaiApiKey,
        modelName: model.key,
      };

      // Only add temperature if the model supports it
      if (!isTemperatureRestricted) {
        modelConfig.temperature = 0.7;
      }

      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading OpenAI models: ${err}`);
    return {};
  }
};

export const loadOpenAIEmbeddingModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const embeddingModels: Record<string, EmbeddingModel> = {};

    openaiEmbeddingModels.forEach((model) => {
      embeddingModels[model.key] = {
        displayName: model.displayName,
        model: new OpenAIEmbeddings({
          apiKey: openaiApiKey,
          modelName: model.key,
        }) as unknown as Embeddings,
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading OpenAI embeddings models: ${err}`);
    return {};
  }
};
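The restored loader hard-codes the list of models that reject a custom temperature (the GPT-5 and o-series models only accept their fixed default), and the same list is duplicated in the custom_openai branch of index.ts. A sketch of that guard factored into one helper, should other call sites need it (buildChatOptions is hypothetical, not part of the commit):

const TEMPERATURE_RESTRICTED = [
  'gpt-5-nano',
  'gpt-5',
  'gpt-5-mini',
  'o1',
  'o3',
  'o3-mini',
  'o4-mini',
];

// Omit the temperature field entirely for models that reject it;
// the API then falls back to its fixed default.
const buildChatOptions = (key: string, apiKey: string) => ({
  apiKey,
  modelName: key,
  ...(TEMPERATURE_RESTRICTED.some((m) => key.includes(m))
    ? {}
    : { temperature: 0.7 }),
});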
36
src/lib/providers/transformers.ts
Normal file
@@ -0,0 +1,36 @@
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';

export const PROVIDER_INFO = {
  key: 'transformers',
  displayName: 'Hugging Face',
};

export const loadTransformersEmbeddingsModels = async () => {
  try {
    const embeddingModels = {
      'xenova-bge-small-en-v1.5': {
        displayName: 'BGE Small',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/bge-small-en-v1.5',
        }),
      },
      'xenova-gte-small': {
        displayName: 'GTE Small',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/gte-small',
        }),
      },
      'xenova-bert-base-multilingual-uncased': {
        displayName: 'Bert Multilingual',
        model: new HuggingFaceTransformersEmbeddings({
          modelName: 'Xenova/bert-base-multilingual-uncased',
        }),
      },
    };

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading Transformers embeddings model: ${err}`);
    return {};
  }
};
@@ -1,5 +1,5 @@
import axios from 'axios';
import { getSearxngURL } from './config/serverRegistry';
import { getSearxngApiEndpoint } from './config';

interface SearxngSearchOptions {
  categories?: string[];
@@ -23,7 +23,7 @@ export const searchSearxng = async (
  query: string,
  opts?: SearxngSearchOptions,
) => {
  const searxngURL = getSearxngURL();
  const searxngURL = getSearxngApiEndpoint();

  const url = new URL(`${searxngURL}/search?format=json`);
  url.searchParams.append('q', query);
@@ -1,7 +0,0 @@
import crypto from 'crypto';

export const hashObj = (obj: { [key: string]: any }) => {
  const json = JSON.stringify(obj, Object.keys(obj).sort());
  const hash = crypto.createHash('sha256').update(json).digest('hex');
  return hash;
};
5
src/lib/types/compute-dot.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
declare function computeDot(vectorA: number[], vectorB: number[]): number;

declare module 'compute-dot' {
  export default computeDot;
}
@@ -1,7 +1,17 @@
import dot from 'compute-dot';
import cosineSimilarity from 'compute-cosine-similarity';
import { getSimilarityMeasure } from '../config';

const computeSimilarity = (x: number[], y: number[]): number => {
  return cosineSimilarity(x, y) as number;
  const similarityMeasure = getSimilarityMeasure();

  if (similarityMeasure === 'cosine') {
    return cosineSimilarity(x, y) as number;
  } else if (similarityMeasure === 'dot') {
    return dot(x, y);
  }

  throw new Error('Invalid similarity measure');
};

export default computeSimilarity;
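Worth noting for the change above: cosine similarity is the dot product divided by the product of the vector norms, so on embeddings that are already unit-normalised the two measures agree exactly and produce the same ranking. A quick check with hypothetical unit vectors, using the same two packages the file imports:

import dot from 'compute-dot';
import cosineSimilarity from 'compute-cosine-similarity';

const a = [0.6, 0.8]; // unit length
const b = [1, 0];     // unit length

console.log(dot(a, b));              // 0.6
console.log(cosineSimilarity(a, b)); // 0.6 — identical on unit vectors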
@@ -2,17 +2,17 @@ import type { Config } from 'tailwindcss';
import type { DefaultColors } from 'tailwindcss/types/generated/colors';

const themeDark = (colors: DefaultColors) => ({
  50: '#0d1117',
  100: '#161b22',
  200: '#21262d',
  300: '#30363d',
  50: '#111116',
  100: '#1f202b',
  200: '#2d2f3f',
  300: '#3a3c4c',
});

const themeLight = (colors: DefaultColors) => ({
  50: '#ffffff',
  100: '#f6f8fa',
  200: '#d0d7de',
  300: '#afb8c1',
  100: '#f1f5f9',
  200: '#c4c7c5',
  300: '#9ca3af',
});

const config: Config = {
@@ -49,9 +49,6 @@ const config: Config = {
      },
    },
  },
  plugins: [
    require('@tailwindcss/typography'),
    require('@headlessui/tailwindcss')({ prefix: 'headless' }),
  ],
  plugins: [require('@tailwindcss/typography')],
};
export default config;
158
yarn.lock
@@ -39,13 +39,6 @@
    resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
    integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==

"@cspotcode/source-map-support@^0.8.0":
  version "0.8.1"
  resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
  integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==
  dependencies:
    "@jridgewell/trace-mapping" "0.3.9"

"@dabh/diagnostics@^2.0.2":
  version "2.0.3"
  resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
@@ -414,11 +407,6 @@
    "@react-aria/interactions" "^3.21.3"
    "@tanstack/react-virtual" "^3.8.1"

"@headlessui/tailwindcss@^0.2.2":
  version "0.2.2"
  resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.2.2.tgz#8ebde73fabca72d48636ea56ae790209dc5f0d49"
  integrity sha512-xNe42KjdyA4kfUKLLPGzME9zkH7Q3rOZ5huFihWNWOQFxnItxPB3/67yBI8/qBfY8nwBRx5GHn4VprsoluVMGw==

"@huggingface/jinja@^0.2.2":
  version "0.2.2"
  resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
@@ -587,7 +575,7 @@
    "@jridgewell/sourcemap-codec" "^1.4.10"
    "@jridgewell/trace-mapping" "^0.3.24"

"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0":
"@jridgewell/resolve-uri@^3.1.0":
  version "3.1.2"
  resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
  integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
@@ -602,14 +590,6 @@
  resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
  integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==

"@jridgewell/trace-mapping@0.3.9":
  version "0.3.9"
  resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
  integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==
  dependencies:
    "@jridgewell/resolve-uri" "^3.0.3"
    "@jridgewell/sourcemap-codec" "^1.4.10"

"@jridgewell/trace-mapping@^0.3.24":
  version "0.3.25"
  resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
@@ -681,6 +661,33 @@
    groq-sdk "^0.19.0"
    zod "^3.22.4"

"@langchain/langgraph-checkpoint@^0.1.1":
  version "0.1.1"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph-checkpoint/-/langgraph-checkpoint-0.1.1.tgz#500569a02af4b85172d775de63eeba06afa0c189"
  integrity sha512-h2bP0RUikQZu0Um1ZUPErQLXyhzroJqKRbRcxYRTAh49oNlsfeq4A3K4YEDRbGGuyPZI/Jiqwhks1wZwY73AZw==
  dependencies:
    uuid "^10.0.0"

"@langchain/langgraph-sdk@~0.1.0":
  version "0.1.9"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph-sdk/-/langgraph-sdk-0.1.9.tgz#5442bd1a4257b5d94927af6e09b0aed341ae8a1d"
  integrity sha512-7WEDHtbI3pYPUiiHq+dPaF92ZN2W7lqObdpK0X+roa8zPdHUjve/HiqYuKNWS12u1N+L5QIuQWqZvVNvUA7BfQ==
  dependencies:
    "@types/json-schema" "^7.0.15"
    p-queue "^6.6.2"
    p-retry "4"
    uuid "^9.0.0"

"@langchain/langgraph@^0.4.9":
  version "0.4.9"
  resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.4.9.tgz#470a238ea98662d6ec9dfc42859a00acad00fc81"
  integrity sha512-+rcdTGi4Ium4X/VtIX3Zw4RhxEkYWpwUyz806V6rffjHOAMamg6/WZDxpJbrP33RV/wJG1GH12Z29oX3Pqq3Aw==
  dependencies:
    "@langchain/langgraph-checkpoint" "^0.1.1"
    "@langchain/langgraph-sdk" "~0.1.0"
    uuid "^10.0.0"
    zod "^3.25.32"

"@langchain/ollama@^0.2.3":
  version "0.2.3"
  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-0.2.3.tgz#4868e66db4fc480f08c42fc652274abbab0416f0"
@@ -947,26 +954,6 @@
  resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4"
  integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==

"@tsconfig/node10@^1.0.7":
  version "1.0.11"
  resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2"
  integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==

"@tsconfig/node12@^1.0.7":
  version "1.0.11"
  resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d"
  integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==

"@tsconfig/node14@^1.0.0":
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1"
  integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==

"@tsconfig/node16@^1.0.2":
  version "1.0.4"
  resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9"
  integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==

"@types/better-sqlite3@^7.6.12":
  version "7.6.12"
  resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7"
@@ -979,6 +966,11 @@
  resolved "https://registry.yarnpkg.com/@types/html-to-text/-/html-to-text-9.0.4.tgz#4a83dd8ae8bfa91457d0b1ffc26f4d0537eff58c"
  integrity sha512-pUY3cKH/Nm2yYrEmDlPR1mR7yszjGx4DrwPjQ702C4/D5CwHuZTgZdIdwPkRbcuhs7BAh2L5rg3CL5cbRiGTCQ==

"@types/json-schema@^7.0.15":
  version "7.0.15"
  resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
  integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==

"@types/json5@^0.0.29":
  version "0.0.29"
  resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
@@ -1159,18 +1151,6 @@ acorn-jsx@^5.3.2:
  resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
  integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==

acorn-walk@^8.1.1:
  version "8.3.4"
  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7"
  integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==
  dependencies:
    acorn "^8.11.0"

acorn@^8.11.0, acorn@^8.4.1:
  version "8.15.0"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816"
  integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==

acorn@^8.9.0:
  version "8.11.3"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
@@ -1233,11 +1213,6 @@ anymatch@~3.1.2:
    normalize-path "^3.0.0"
    picomatch "^2.0.4"

arg@^4.1.0:
  version "4.1.3"
  resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
  integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==

arg@^5.0.2:
  version "5.0.2"
  resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
@@ -1796,11 +1771,6 @@ core-util-is@~1.0.0:
  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
  integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==

create-require@^1.1.0:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
  integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==

cross-fetch@^3.1.5:
  version "3.2.0"
  resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3"
@@ -1945,11 +1915,6 @@ didyoumean@^1.2.2:
  resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
  integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==

diff@^4.0.1:
  version "4.0.2"
  resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
  integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==

dingbat-to-unicode@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83"
@@ -2709,15 +2674,6 @@ fraction.js@^4.3.7:
  resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7"
  integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==

framer-motion@^12.23.24:
  version "12.23.24"
  resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-12.23.24.tgz#4895b67e880bd2b1089e61fbaa32ae802fc24b8c"
  integrity sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w==
  dependencies:
    motion-dom "^12.23.23"
    motion-utils "^12.23.6"
    tslib "^2.4.0"

fs-constants@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
@@ -3586,11 +3542,6 @@ lucide-react@^0.363.0:
  resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
  integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==

make-error@^1.1.1:
  version "1.3.6"
  resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
  integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==

mammoth@^1.9.1:
  version "1.9.1"
  resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf"
@@ -3683,18 +3634,6 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
  resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
  integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==

motion-dom@^12.23.23:
  version "12.23.23"
  resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-12.23.23.tgz#8f874333ea1a04ee3a89eb928f518b463d589e0e"
  integrity sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==
  dependencies:
    motion-utils "^12.23.6"

motion-utils@^12.23.6:
  version "12.23.6"
  resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-12.23.6.tgz#fafef80b4ea85122dd0d6c599a0c63d72881f312"
  integrity sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==

ms@2.1.2:
  version "2.1.2"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
@@ -5064,25 +5003,6 @@ ts-interface-checker@^0.1.9:
  resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
  integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==

ts-node@^10.9.2:
  version "10.9.2"
  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f"
  integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==
  dependencies:
    "@cspotcode/source-map-support" "^0.8.0"
    "@tsconfig/node10" "^1.0.7"
    "@tsconfig/node12" "^1.0.7"
    "@tsconfig/node14" "^1.0.0"
    "@tsconfig/node16" "^1.0.2"
    acorn "^8.4.1"
    acorn-walk "^8.1.1"
    arg "^4.1.0"
    create-require "^1.1.0"
    diff "^4.0.1"
    make-error "^1.1.1"
    v8-compile-cache-lib "^3.0.1"
    yn "3.1.1"

tsconfig-paths@^3.15.0:
  version "3.15.0"
  resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4"
@@ -5245,16 +5165,11 @@ uuid@^11.1.0:
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.1.0.tgz#9549028be1753bb934fc96e2bca09bb4105ae912"
  integrity sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==

uuid@^9.0.1:
uuid@^9.0.0, uuid@^9.0.1:
  version "9.0.1"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
  integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==

v8-compile-cache-lib@^3.0.1:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
  integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==

validate.io-array@^1.0.3, validate.io-array@^1.0.5:
  version "1.0.6"
  resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d"
@@ -5472,11 +5387,6 @@ yet-another-react-lightbox@^3.17.2:
  resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c"
  integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ==

yn@3.1.1:
  version "3.1.1"
  resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
  integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==

yocto-queue@^0.1.0:
  version "0.1.0"
  resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"