mirror of
https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-11-28 07:48:15 +00:00
Compare commits
21 Commits
v1.11.0-rc
...
b67ca79e2a
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b67ca79e2a | ||
|
|
626cb646e2 | ||
|
|
410201b117 | ||
|
|
30fb1e312b | ||
|
|
cc5eea17e4 | ||
|
|
4ee3173368 | ||
|
|
6d61528347 | ||
|
|
c02e535f4c | ||
|
|
a375de73cc | ||
|
|
87226957f1 | ||
|
|
77743949c7 | ||
|
|
64c4514cad | ||
|
|
999553877d | ||
|
|
e45a9af9ff | ||
|
|
e7fbab12ed | ||
|
|
387da5dbdd | ||
|
|
3003d44544 | ||
|
|
f1e6aa9c1a | ||
|
|
f39638fe02 | ||
|
|
535c0b9897 | ||
|
|
47350b34ec |
@@ -15,9 +15,6 @@ COPY drizzle ./drizzle
|
|||||||
RUN mkdir -p /home/perplexica/data
|
RUN mkdir -p /home/perplexica/data
|
||||||
RUN yarn build
|
RUN yarn build
|
||||||
|
|
||||||
RUN yarn add --dev @vercel/ncc
|
|
||||||
RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator
|
|
||||||
|
|
||||||
FROM node:24.5.0-slim
|
FROM node:24.5.0-slim
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
|
RUN apt-get update && apt-get install -y python3 python3-pip sqlite3 && rm -rf /var/lib/apt/lists/*
|
||||||
@@ -30,8 +27,6 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static
|
|||||||
COPY --from=builder /home/perplexica/.next/standalone ./
|
COPY --from=builder /home/perplexica/.next/standalone ./
|
||||||
COPY --from=builder /home/perplexica/data ./data
|
COPY --from=builder /home/perplexica/data ./data
|
||||||
COPY drizzle ./drizzle
|
COPY drizzle ./drizzle
|
||||||
COPY --from=builder /home/perplexica/migrator/build ./build
|
|
||||||
COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
|
|
||||||
|
|
||||||
RUN mkdir /home/perplexica/uploads
|
RUN mkdir /home/perplexica/uploads
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
node migrate.js
|
|
||||||
|
|
||||||
exec node server.js
|
exec node server.js
|
||||||
@@ -5,14 +5,14 @@
|
|||||||
"author": "ItzCrazyKns",
|
"author": "ItzCrazyKns",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "next dev",
|
"dev": "next dev",
|
||||||
"build": "npm run db:migrate && next build",
|
"build": "next build",
|
||||||
"start": "next start",
|
"start": "next start",
|
||||||
"lint": "next lint",
|
"lint": "next lint",
|
||||||
"format:write": "prettier . --write",
|
"format:write": "prettier . --write"
|
||||||
"db:migrate": "node ./src/lib/db/migrate.ts"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@headlessui/react": "^2.2.0",
|
"@headlessui/react": "^2.2.0",
|
||||||
|
"@headlessui/tailwindcss": "^0.2.2",
|
||||||
"@iarna/toml": "^2.2.5",
|
"@iarna/toml": "^2.2.5",
|
||||||
"@icons-pack/react-simple-icons": "^12.3.0",
|
"@icons-pack/react-simple-icons": "^12.3.0",
|
||||||
"@langchain/anthropic": "^0.3.24",
|
"@langchain/anthropic": "^0.3.24",
|
||||||
@@ -65,6 +65,7 @@
|
|||||||
"postcss": "^8",
|
"postcss": "^8",
|
||||||
"prettier": "^3.2.5",
|
"prettier": "^3.2.5",
|
||||||
"tailwindcss": "^3.3.0",
|
"tailwindcss": "^3.3.0",
|
||||||
|
"ts-node": "^10.9.2",
|
||||||
"typescript": "^5"
|
"typescript": "^5"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,47 +0,0 @@
|
|||||||
import {
|
|
||||||
getAvailableChatModelProviders,
|
|
||||||
getAvailableEmbeddingModelProviders,
|
|
||||||
} from '@/lib/providers';
|
|
||||||
|
|
||||||
export const GET = async (req: Request) => {
|
|
||||||
try {
|
|
||||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
|
||||||
getAvailableChatModelProviders(),
|
|
||||||
getAvailableEmbeddingModelProviders(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
Object.keys(chatModelProviders).forEach((provider) => {
|
|
||||||
Object.keys(chatModelProviders[provider]).forEach((model) => {
|
|
||||||
delete (chatModelProviders[provider][model] as { model?: unknown })
|
|
||||||
.model;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
Object.keys(embeddingModelProviders).forEach((provider) => {
|
|
||||||
Object.keys(embeddingModelProviders[provider]).forEach((model) => {
|
|
||||||
delete (embeddingModelProviders[provider][model] as { model?: unknown })
|
|
||||||
.model;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json(
|
|
||||||
{
|
|
||||||
chatModelProviders,
|
|
||||||
embeddingModelProviders,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
status: 200,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.error('An error occurred while fetching models', err);
|
|
||||||
return Response.json(
|
|
||||||
{
|
|
||||||
message: 'An error has occurred.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
status: 500,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
28
src/app/api/providers/route.ts
Normal file
28
src/app/api/providers/route.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
import ModelRegistry from '@/lib/models/registry';
|
||||||
|
|
||||||
|
export const GET = async (req: Request) => {
|
||||||
|
try {
|
||||||
|
const registry = new ModelRegistry();
|
||||||
|
|
||||||
|
const activeProviders = await registry.getActiveProviders();
|
||||||
|
|
||||||
|
return Response.json(
|
||||||
|
{
|
||||||
|
providers: activeProviders,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('An error occurred while fetching providers', err);
|
||||||
|
return Response.json(
|
||||||
|
{
|
||||||
|
message: 'An error has occurred.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
status: 500,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
@@ -63,7 +63,7 @@ const Focus = () => {
|
|||||||
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
|
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
|
||||||
<PopoverButton
|
<PopoverButton
|
||||||
type="button"
|
type="button"
|
||||||
className=" text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
className="active:border-none headless-open:text-black dark:headless-open:text-white text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||||
>
|
>
|
||||||
{focusMode !== 'webSearch' ? (
|
{focusMode !== 'webSearch' ? (
|
||||||
<div className="flex flex-row items-center space-x-1">
|
<div className="flex flex-row items-center space-x-1">
|
||||||
|
|||||||
13
src/instrumentation.ts
Normal file
13
src/instrumentation.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
export const register = async () => {
|
||||||
|
if (process.env.NEXT_RUNTIME === 'nodejs') {
|
||||||
|
try {
|
||||||
|
console.log('Running database migrations...');
|
||||||
|
await import('./lib/db/migrate');
|
||||||
|
console.log('Database migrations completed successfully');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to run database migrations:', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
await import('./lib/config/index');
|
||||||
|
}
|
||||||
|
};
|
||||||
@@ -1,158 +0,0 @@
|
|||||||
import toml from '@iarna/toml';
|
|
||||||
|
|
||||||
// Use dynamic imports for Node.js modules to prevent client-side errors
|
|
||||||
let fs: any;
|
|
||||||
let path: any;
|
|
||||||
if (typeof window === 'undefined') {
|
|
||||||
// We're on the server
|
|
||||||
fs = require('fs');
|
|
||||||
path = require('path');
|
|
||||||
}
|
|
||||||
|
|
||||||
const configFileName = 'config.toml';
|
|
||||||
|
|
||||||
interface Config {
|
|
||||||
GENERAL: {
|
|
||||||
SIMILARITY_MEASURE: string;
|
|
||||||
KEEP_ALIVE: string;
|
|
||||||
};
|
|
||||||
MODELS: {
|
|
||||||
OPENAI: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
GROQ: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
ANTHROPIC: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
GEMINI: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
OLLAMA: {
|
|
||||||
API_URL: string;
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
DEEPSEEK: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
AIMLAPI: {
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
LM_STUDIO: {
|
|
||||||
API_URL: string;
|
|
||||||
};
|
|
||||||
LEMONADE: {
|
|
||||||
API_URL: string;
|
|
||||||
API_KEY: string;
|
|
||||||
};
|
|
||||||
CUSTOM_OPENAI: {
|
|
||||||
API_URL: string;
|
|
||||||
API_KEY: string;
|
|
||||||
MODEL_NAME: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
API_ENDPOINTS: {
|
|
||||||
SEARXNG: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
type RecursivePartial<T> = {
|
|
||||||
[P in keyof T]?: RecursivePartial<T[P]>;
|
|
||||||
};
|
|
||||||
|
|
||||||
const loadConfig = () => {
|
|
||||||
// Server-side only
|
|
||||||
if (typeof window === 'undefined') {
|
|
||||||
return toml.parse(
|
|
||||||
fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
|
|
||||||
) as any as Config;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client-side fallback - settings will be loaded via API
|
|
||||||
return {} as Config;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getSimilarityMeasure = () =>
|
|
||||||
loadConfig().GENERAL.SIMILARITY_MEASURE;
|
|
||||||
|
|
||||||
export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
|
|
||||||
|
|
||||||
export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
|
|
||||||
|
|
||||||
export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
|
|
||||||
|
|
||||||
export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
|
|
||||||
|
|
||||||
export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
|
|
||||||
|
|
||||||
export const getSearxngApiEndpoint = () =>
|
|
||||||
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
|
||||||
|
|
||||||
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
|
|
||||||
|
|
||||||
export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
|
|
||||||
|
|
||||||
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
|
|
||||||
|
|
||||||
export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
|
|
||||||
|
|
||||||
export const getCustomOpenaiApiKey = () =>
|
|
||||||
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
|
|
||||||
|
|
||||||
export const getCustomOpenaiApiUrl = () =>
|
|
||||||
loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
|
|
||||||
|
|
||||||
export const getCustomOpenaiModelName = () =>
|
|
||||||
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
|
|
||||||
|
|
||||||
export const getLMStudioApiEndpoint = () =>
|
|
||||||
loadConfig().MODELS.LM_STUDIO.API_URL;
|
|
||||||
|
|
||||||
export const getLemonadeApiEndpoint = () =>
|
|
||||||
loadConfig().MODELS.LEMONADE.API_URL;
|
|
||||||
|
|
||||||
export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY;
|
|
||||||
|
|
||||||
const mergeConfigs = (current: any, update: any): any => {
|
|
||||||
if (update === null || update === undefined) {
|
|
||||||
return current;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof current !== 'object' || current === null) {
|
|
||||||
return update;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = { ...current };
|
|
||||||
|
|
||||||
for (const key in update) {
|
|
||||||
if (Object.prototype.hasOwnProperty.call(update, key)) {
|
|
||||||
const updateValue = update[key];
|
|
||||||
|
|
||||||
if (
|
|
||||||
typeof updateValue === 'object' &&
|
|
||||||
updateValue !== null &&
|
|
||||||
typeof result[key] === 'object' &&
|
|
||||||
result[key] !== null
|
|
||||||
) {
|
|
||||||
result[key] = mergeConfigs(result[key], updateValue);
|
|
||||||
} else if (updateValue !== undefined) {
|
|
||||||
result[key] = updateValue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
|
||||||
// Server-side only
|
|
||||||
if (typeof window === 'undefined') {
|
|
||||||
const currentConfig = loadConfig();
|
|
||||||
const mergedConfig = mergeConfigs(currentConfig, config);
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(path.join(process.cwd(), `${configFileName}`)),
|
|
||||||
toml.stringify(mergedConfig),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
16
src/lib/config/clientRegistry.ts
Normal file
16
src/lib/config/clientRegistry.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
'use client';
|
||||||
|
|
||||||
|
const getClientConfig = (key: string, defaultVal?: any) => {
|
||||||
|
return localStorage.getItem(key) ?? defaultVal ?? undefined;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getTheme = () => getClientConfig('theme', 'dark');
|
||||||
|
|
||||||
|
export const getAutoImageSearch = () =>
|
||||||
|
Boolean(getClientConfig('autoImageSearch', 'true'));
|
||||||
|
|
||||||
|
export const getAutoVideoSearch = () =>
|
||||||
|
Boolean(getClientConfig('autoVideoSearch', 'true'));
|
||||||
|
|
||||||
|
export const getSystemInstructions = () =>
|
||||||
|
getClientConfig('systemInstructions', '');
|
||||||
213
src/lib/config/index.ts
Normal file
213
src/lib/config/index.ts
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
import path from 'node:path';
|
||||||
|
import fs from 'fs';
|
||||||
|
import { Config, ConfigModelProvider, UIConfigSections } from './types';
|
||||||
|
import { hashObj } from '../serverUtils';
|
||||||
|
import { getModelProvidersUIConfigSection } from '../models/providers';
|
||||||
|
|
||||||
|
class ConfigManager {
|
||||||
|
configPath: string = path.join(
|
||||||
|
process.env.DATA_DIR || process.cwd(),
|
||||||
|
'/data/config.json',
|
||||||
|
);
|
||||||
|
configVersion = 1;
|
||||||
|
currentConfig: Config = {
|
||||||
|
version: this.configVersion,
|
||||||
|
setupComplete: false,
|
||||||
|
general: {},
|
||||||
|
modelProviders: [],
|
||||||
|
};
|
||||||
|
uiConfigSections: UIConfigSections = {
|
||||||
|
general: [],
|
||||||
|
modelProviders: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.initialize();
|
||||||
|
}
|
||||||
|
|
||||||
|
private initialize() {
|
||||||
|
this.initializeConfig();
|
||||||
|
this.initializeFromEnv();
|
||||||
|
}
|
||||||
|
|
||||||
|
private saveConfig() {
|
||||||
|
fs.writeFileSync(
|
||||||
|
this.configPath,
|
||||||
|
JSON.stringify(this.currentConfig, null, 2),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private initializeConfig() {
|
||||||
|
const exists = fs.existsSync(this.configPath);
|
||||||
|
if (!exists) {
|
||||||
|
fs.writeFileSync(
|
||||||
|
this.configPath,
|
||||||
|
JSON.stringify(this.currentConfig, null, 2),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
this.currentConfig = JSON.parse(
|
||||||
|
fs.readFileSync(this.configPath, 'utf-8'),
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof SyntaxError) {
|
||||||
|
console.error(
|
||||||
|
`Error parsing config file at ${this.configPath}:`,
|
||||||
|
err,
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
'Loading default config and overwriting the existing file.',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
this.configPath,
|
||||||
|
JSON.stringify(this.currentConfig, null, 2),
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
console.log('Unknown error reading config file:', err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.currentConfig = this.migrateConfig(this.currentConfig);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private migrateConfig(config: Config): Config {
|
||||||
|
/* TODO: Add migrations */
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
|
||||||
|
private initializeFromEnv() {
|
||||||
|
const providerConfigSections = getModelProvidersUIConfigSection();
|
||||||
|
|
||||||
|
this.uiConfigSections.modelProviders = providerConfigSections;
|
||||||
|
|
||||||
|
const newProviders: ConfigModelProvider[] = [];
|
||||||
|
|
||||||
|
providerConfigSections.forEach((provider) => {
|
||||||
|
const newProvider: ConfigModelProvider & { required?: string[] } = {
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
name: `${provider.name} ${Math.floor(Math.random() * 1000)}`,
|
||||||
|
type: provider.key,
|
||||||
|
chatModels: [],
|
||||||
|
embeddingModels: [],
|
||||||
|
config: {},
|
||||||
|
required: [],
|
||||||
|
hash: '',
|
||||||
|
};
|
||||||
|
|
||||||
|
provider.fields.forEach((field) => {
|
||||||
|
newProvider.config[field.key] =
|
||||||
|
process.env[field.env!] ||
|
||||||
|
field.default ||
|
||||||
|
''; /* Env var must exist for providers */
|
||||||
|
|
||||||
|
if (field.required) newProvider.required?.push(field.key);
|
||||||
|
});
|
||||||
|
|
||||||
|
let configured = true;
|
||||||
|
|
||||||
|
newProvider.required?.forEach((r) => {
|
||||||
|
if (!newProvider.config[r]) {
|
||||||
|
configured = false;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (configured) {
|
||||||
|
const hash = hashObj(newProvider.config);
|
||||||
|
newProvider.hash = hash;
|
||||||
|
delete newProvider.required;
|
||||||
|
|
||||||
|
const exists = this.currentConfig.modelProviders.find(
|
||||||
|
(p) => p.hash === hash,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!exists) {
|
||||||
|
newProviders.push(newProvider);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.currentConfig.modelProviders.push(...newProviders);
|
||||||
|
|
||||||
|
this.saveConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
public getConfig(key: string, defaultValue?: any): any {
|
||||||
|
const nested = key.split('.');
|
||||||
|
let obj: any = this.currentConfig;
|
||||||
|
|
||||||
|
for (let i = 0; i < nested.length; i++) {
|
||||||
|
const part = nested[i];
|
||||||
|
if (obj == null) return defaultValue;
|
||||||
|
|
||||||
|
obj = obj[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj === undefined ? defaultValue : obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
public updateConfig(key: string, val: any) {
|
||||||
|
const parts = key.split('.');
|
||||||
|
if (parts.length === 0) return;
|
||||||
|
|
||||||
|
let target: any = this.currentConfig;
|
||||||
|
for (let i = 0; i < parts.length - 1; i++) {
|
||||||
|
const part = parts[i];
|
||||||
|
if (target[part] === null || typeof target[part] !== 'object') {
|
||||||
|
target[part] = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
target = target[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
const finalKey = parts[parts.length - 1];
|
||||||
|
target[finalKey] = val;
|
||||||
|
|
||||||
|
this.saveConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
public addModelProvider(type: string, name: string, config: any) {
|
||||||
|
const newModelProvider: ConfigModelProvider = {
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
config,
|
||||||
|
chatModels: [],
|
||||||
|
embeddingModels: [],
|
||||||
|
hash: hashObj(config),
|
||||||
|
};
|
||||||
|
|
||||||
|
this.currentConfig.modelProviders.push(newModelProvider);
|
||||||
|
this.saveConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
public removeModelProvider(id: string) {
|
||||||
|
const index = this.currentConfig.modelProviders.findIndex(
|
||||||
|
(p) => p.id === id,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (index === -1) return;
|
||||||
|
|
||||||
|
this.currentConfig.modelProviders =
|
||||||
|
this.currentConfig.modelProviders.filter((p) => p.id !== id);
|
||||||
|
|
||||||
|
this.saveConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
public isSetupComplete() {
|
||||||
|
return this.currentConfig.setupComplete
|
||||||
|
}
|
||||||
|
|
||||||
|
public markSetupComplete() {
|
||||||
|
if (!this.currentConfig.setupComplete) {
|
||||||
|
this.currentConfig.setupComplete = true
|
||||||
|
}
|
||||||
|
|
||||||
|
this.saveConfig()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const configManager = new ConfigManager();
|
||||||
|
|
||||||
|
export default configManager;
|
||||||
14
src/lib/config/serverRegistry.ts
Normal file
14
src/lib/config/serverRegistry.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import configManager from './index';
|
||||||
|
import { ConfigModelProvider } from './types';
|
||||||
|
|
||||||
|
export const getConfiguredModelProviders = (): ConfigModelProvider[] => {
|
||||||
|
return configManager.getConfig('modelProviders', []);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getConfiguredModelProviderById = (
|
||||||
|
id: string,
|
||||||
|
): ConfigModelProvider | undefined => {
|
||||||
|
return getConfiguredModelProviders().find((p) => p.id === id) ?? undefined;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getSearxngURL = () => configManager.getConfig('search.searxngURL', '')
|
||||||
89
src/lib/config/types.ts
Normal file
89
src/lib/config/types.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import { Model } from '../models/types';
|
||||||
|
|
||||||
|
type BaseUIConfigField = {
|
||||||
|
name: string;
|
||||||
|
key: string;
|
||||||
|
required: boolean;
|
||||||
|
description: string;
|
||||||
|
scope: 'client' | 'server';
|
||||||
|
env?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type StringUIConfigField = BaseUIConfigField & {
|
||||||
|
type: 'string';
|
||||||
|
placeholder?: string;
|
||||||
|
default?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type SelectUIConfigFieldOptions = {
|
||||||
|
name: string;
|
||||||
|
key: string;
|
||||||
|
value: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type SelectUIConfigField = BaseUIConfigField & {
|
||||||
|
type: 'select';
|
||||||
|
default?: string;
|
||||||
|
options: SelectUIConfigFieldOptions[];
|
||||||
|
};
|
||||||
|
|
||||||
|
type PasswordUIConfigField = BaseUIConfigField & {
|
||||||
|
type: 'password';
|
||||||
|
placeholder?: string;
|
||||||
|
default?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type UIConfigField =
|
||||||
|
| StringUIConfigField
|
||||||
|
| SelectUIConfigField
|
||||||
|
| PasswordUIConfigField;
|
||||||
|
|
||||||
|
type ConfigModelProvider = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
chatModels: Model[];
|
||||||
|
embeddingModels: Model[];
|
||||||
|
config: { [key: string]: any };
|
||||||
|
hash: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type Config = {
|
||||||
|
version: number;
|
||||||
|
setupComplete: boolean;
|
||||||
|
general: {
|
||||||
|
[key: string]: any;
|
||||||
|
};
|
||||||
|
modelProviders: ConfigModelProvider[];
|
||||||
|
search: {
|
||||||
|
[key: string]: any
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
type EnvMap = {
|
||||||
|
[key: string]: {
|
||||||
|
fieldKey: string;
|
||||||
|
providerKey: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
type ModelProviderUISection = {
|
||||||
|
name: string;
|
||||||
|
key: string;
|
||||||
|
fields: UIConfigField[];
|
||||||
|
};
|
||||||
|
|
||||||
|
type UIConfigSections = {
|
||||||
|
general: UIConfigField[];
|
||||||
|
modelProviders: ModelProviderUISection[];
|
||||||
|
search: UIConfigField[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type {
|
||||||
|
UIConfigField,
|
||||||
|
Config,
|
||||||
|
EnvMap,
|
||||||
|
UIConfigSections,
|
||||||
|
ModelProviderUISection,
|
||||||
|
ConfigModelProvider,
|
||||||
|
};
|
||||||
@@ -20,6 +20,7 @@ import crypto from 'crypto';
|
|||||||
import { useSearchParams } from 'next/navigation';
|
import { useSearchParams } from 'next/navigation';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { getSuggestions } from '../actions';
|
import { getSuggestions } from '../actions';
|
||||||
|
import { MinimalProvider } from '../models/types';
|
||||||
|
|
||||||
export type Section = {
|
export type Section = {
|
||||||
userMessage: UserMessage;
|
userMessage: UserMessage;
|
||||||
@@ -66,13 +67,13 @@ export interface File {
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface ChatModelProvider {
|
interface ChatModelProvider {
|
||||||
name: string;
|
key: string;
|
||||||
provider: string;
|
providerId: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface EmbeddingModelProvider {
|
interface EmbeddingModelProvider {
|
||||||
name: string;
|
key: string;
|
||||||
provider: string;
|
providerId: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
const checkConfig = async (
|
const checkConfig = async (
|
||||||
@@ -82,10 +83,12 @@ const checkConfig = async (
|
|||||||
setHasError: (hasError: boolean) => void,
|
setHasError: (hasError: boolean) => void,
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
let chatModel = localStorage.getItem('chatModel');
|
let chatModelKey = localStorage.getItem('chatModelKey');
|
||||||
let chatModelProvider = localStorage.getItem('chatModelProvider');
|
let chatModelProviderId = localStorage.getItem('chatModelProviderId');
|
||||||
let embeddingModel = localStorage.getItem('embeddingModel');
|
let embeddingModelKey = localStorage.getItem('embeddingModelKey');
|
||||||
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
|
let embeddingModelProviderId = localStorage.getItem(
|
||||||
|
'embeddingModelProviderId',
|
||||||
|
);
|
||||||
|
|
||||||
const autoImageSearch = localStorage.getItem('autoImageSearch');
|
const autoImageSearch = localStorage.getItem('autoImageSearch');
|
||||||
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
|
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
|
||||||
@@ -98,145 +101,81 @@ const checkConfig = async (
|
|||||||
localStorage.setItem('autoVideoSearch', 'false');
|
localStorage.setItem('autoVideoSearch', 'false');
|
||||||
}
|
}
|
||||||
|
|
||||||
const providers = await fetch(`/api/models`, {
|
const res = await fetch(`/api/providers`, {
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
},
|
},
|
||||||
}).then(async (res) => {
|
|
||||||
if (!res.ok)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to fetch models: ${res.status} ${res.statusText}`,
|
|
||||||
);
|
|
||||||
return res.json();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
if (
|
if (!res.ok) {
|
||||||
!chatModel ||
|
throw new Error(
|
||||||
!chatModelProvider ||
|
`Provider fetching failed with status code ${res.status}`,
|
||||||
!embeddingModel ||
|
);
|
||||||
!embeddingModelProvider
|
|
||||||
) {
|
|
||||||
if (!chatModel || !chatModelProvider) {
|
|
||||||
const chatModelProviders = providers.chatModelProviders;
|
|
||||||
const chatModelProvidersKeys = Object.keys(chatModelProviders);
|
|
||||||
|
|
||||||
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
|
|
||||||
return toast.error('No chat models available');
|
|
||||||
} else {
|
|
||||||
chatModelProvider =
|
|
||||||
chatModelProvidersKeys.find(
|
|
||||||
(provider) =>
|
|
||||||
Object.keys(chatModelProviders[provider]).length > 0,
|
|
||||||
) || chatModelProvidersKeys[0];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
chatModelProvider === 'custom_openai' &&
|
|
||||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0
|
|
||||||
) {
|
|
||||||
toast.error(
|
|
||||||
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
|
|
||||||
);
|
|
||||||
return setHasError(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!embeddingModel || !embeddingModelProvider) {
|
|
||||||
const embeddingModelProviders = providers.embeddingModelProviders;
|
|
||||||
|
|
||||||
if (
|
|
||||||
!embeddingModelProviders ||
|
|
||||||
Object.keys(embeddingModelProviders).length === 0
|
|
||||||
)
|
|
||||||
return toast.error('No embedding models available');
|
|
||||||
|
|
||||||
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
|
|
||||||
embeddingModel = Object.keys(
|
|
||||||
embeddingModelProviders[embeddingModelProvider],
|
|
||||||
)[0];
|
|
||||||
}
|
|
||||||
|
|
||||||
localStorage.setItem('chatModel', chatModel!);
|
|
||||||
localStorage.setItem('chatModelProvider', chatModelProvider);
|
|
||||||
localStorage.setItem('embeddingModel', embeddingModel!);
|
|
||||||
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
|
|
||||||
} else {
|
|
||||||
const chatModelProviders = providers.chatModelProviders;
|
|
||||||
const embeddingModelProviders = providers.embeddingModelProviders;
|
|
||||||
|
|
||||||
if (
|
|
||||||
Object.keys(chatModelProviders).length > 0 &&
|
|
||||||
(!chatModelProviders[chatModelProvider] ||
|
|
||||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
|
|
||||||
) {
|
|
||||||
const chatModelProvidersKeys = Object.keys(chatModelProviders);
|
|
||||||
chatModelProvider =
|
|
||||||
chatModelProvidersKeys.find(
|
|
||||||
(key) => Object.keys(chatModelProviders[key]).length > 0,
|
|
||||||
) || chatModelProvidersKeys[0];
|
|
||||||
|
|
||||||
localStorage.setItem('chatModelProvider', chatModelProvider);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
chatModelProvider &&
|
|
||||||
!chatModelProviders[chatModelProvider][chatModel]
|
|
||||||
) {
|
|
||||||
if (
|
|
||||||
chatModelProvider === 'custom_openai' &&
|
|
||||||
Object.keys(chatModelProviders[chatModelProvider]).length === 0
|
|
||||||
) {
|
|
||||||
toast.error(
|
|
||||||
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
|
|
||||||
);
|
|
||||||
return setHasError(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
chatModel = Object.keys(
|
|
||||||
chatModelProviders[
|
|
||||||
Object.keys(chatModelProviders[chatModelProvider]).length > 0
|
|
||||||
? chatModelProvider
|
|
||||||
: Object.keys(chatModelProviders)[0]
|
|
||||||
],
|
|
||||||
)[0];
|
|
||||||
|
|
||||||
localStorage.setItem('chatModel', chatModel);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
Object.keys(embeddingModelProviders).length > 0 &&
|
|
||||||
!embeddingModelProviders[embeddingModelProvider]
|
|
||||||
) {
|
|
||||||
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
|
|
||||||
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
embeddingModelProvider &&
|
|
||||||
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
|
|
||||||
) {
|
|
||||||
embeddingModel = Object.keys(
|
|
||||||
embeddingModelProviders[embeddingModelProvider],
|
|
||||||
)[0];
|
|
||||||
localStorage.setItem('embeddingModel', embeddingModel);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const data = await res.json();
|
||||||
|
const providers: MinimalProvider[] = data.providers;
|
||||||
|
|
||||||
|
if (providers.length === 0) {
|
||||||
|
throw new Error(
|
||||||
|
'No chat model providers found, please configure them in the settings page.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const chatModelProvider =
|
||||||
|
providers.find((p) => p.id === chatModelProviderId) ??
|
||||||
|
providers.find((p) => p.chatModels.length > 0);
|
||||||
|
|
||||||
|
if (!chatModelProvider) {
|
||||||
|
throw new Error(
|
||||||
|
'No chat models found, pleae configure them in the settings page.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
chatModelProviderId = chatModelProvider.id;
|
||||||
|
|
||||||
|
const chatModel =
|
||||||
|
chatModelProvider.chatModels.find((m) => m.key === chatModelKey) ??
|
||||||
|
chatModelProvider.chatModels[0];
|
||||||
|
chatModelKey = chatModel.key;
|
||||||
|
|
||||||
|
const embeddingModelProvider =
|
||||||
|
providers.find((p) => p.id === embeddingModelProviderId) ??
|
||||||
|
providers.find((p) => p.embeddingModels.length > 0);
|
||||||
|
|
||||||
|
if (!embeddingModelProvider) {
|
||||||
|
throw new Error(
|
||||||
|
'No embedding models found, pleae configure them in the settings page.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
embeddingModelProviderId = embeddingModelProvider.id;
|
||||||
|
|
||||||
|
const embeddingModel =
|
||||||
|
embeddingModelProvider.embeddingModels.find(
|
||||||
|
(m) => m.key === embeddingModelKey,
|
||||||
|
) ?? embeddingModelProvider.embeddingModels[0];
|
||||||
|
embeddingModelKey = embeddingModel.key;
|
||||||
|
|
||||||
|
localStorage.setItem('chatModelKey', chatModelKey);
|
||||||
|
localStorage.setItem('chatModelProviderId', chatModelProviderId);
|
||||||
|
localStorage.setItem('embeddingModelKey', embeddingModelKey);
|
||||||
|
localStorage.setItem('embeddingModelProviderId', embeddingModelProviderId);
|
||||||
|
|
||||||
setChatModelProvider({
|
setChatModelProvider({
|
||||||
name: chatModel!,
|
key: chatModelKey,
|
||||||
provider: chatModelProvider,
|
providerId: chatModelProviderId,
|
||||||
});
|
});
|
||||||
|
|
||||||
setEmbeddingModelProvider({
|
setEmbeddingModelProvider({
|
||||||
name: embeddingModel!,
|
key: embeddingModelKey,
|
||||||
provider: embeddingModelProvider,
|
providerId: embeddingModelProviderId,
|
||||||
});
|
});
|
||||||
|
|
||||||
setIsConfigReady(true);
|
setIsConfigReady(true);
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
console.error('An error occurred while checking the configuration:', err);
|
console.error('An error occurred while checking the configuration:', err);
|
||||||
|
toast.error(err.message);
|
||||||
setIsConfigReady(false);
|
setIsConfigReady(false);
|
||||||
setHasError(true);
|
setHasError(true);
|
||||||
}
|
}
|
||||||
@@ -356,15 +295,15 @@ export const ChatProvider = ({
|
|||||||
|
|
||||||
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
|
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
|
||||||
{
|
{
|
||||||
name: '',
|
key: '',
|
||||||
provider: '',
|
providerId: '',
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
const [embeddingModelProvider, setEmbeddingModelProvider] =
|
const [embeddingModelProvider, setEmbeddingModelProvider] =
|
||||||
useState<EmbeddingModelProvider>({
|
useState<EmbeddingModelProvider>({
|
||||||
name: '',
|
key: '',
|
||||||
provider: '',
|
providerId: '',
|
||||||
});
|
});
|
||||||
|
|
||||||
const [isConfigReady, setIsConfigReady] = useState(false);
|
const [isConfigReady, setIsConfigReady] = useState(false);
|
||||||
@@ -742,12 +681,12 @@ export const ChatProvider = ({
|
|||||||
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
|
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
|
||||||
: chatHistory,
|
: chatHistory,
|
||||||
chatModel: {
|
chatModel: {
|
||||||
name: chatModelProvider.name,
|
key: chatModelProvider.key,
|
||||||
provider: chatModelProvider.provider,
|
providerId: chatModelProvider.providerId,
|
||||||
},
|
},
|
||||||
embeddingModel: {
|
embeddingModel: {
|
||||||
name: embeddingModelProvider.name,
|
key: embeddingModelProvider.key,
|
||||||
provider: embeddingModelProvider.provider,
|
providerId: embeddingModelProvider.providerId,
|
||||||
},
|
},
|
||||||
systemInstructions: localStorage.getItem('systemInstructions'),
|
systemInstructions: localStorage.getItem('systemInstructions'),
|
||||||
}),
|
}),
|
||||||
|
|||||||
45
src/lib/models/providers/baseProvider.ts
Normal file
45
src/lib/models/providers/baseProvider.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { Embeddings } from '@langchain/core/embeddings';
|
||||||
|
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||||
|
import { Model, ModelList, ProviderMetadata } from '../types';
|
||||||
|
import { UIConfigField } from '@/lib/config/types';
|
||||||
|
|
||||||
|
abstract class BaseModelProvider<CONFIG> {
|
||||||
|
constructor(
|
||||||
|
protected id: string,
|
||||||
|
protected name: string,
|
||||||
|
protected config: CONFIG,
|
||||||
|
) {}
|
||||||
|
abstract getDefaultModels(): Promise<ModelList>;
|
||||||
|
abstract getModelList(): Promise<ModelList>;
|
||||||
|
abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
|
||||||
|
abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;
|
||||||
|
static getProviderConfigFields(): UIConfigField[] {
|
||||||
|
throw new Error('Method not implemented.');
|
||||||
|
}
|
||||||
|
static getProviderMetadata(): ProviderMetadata {
|
||||||
|
throw new Error('Method not Implemented.');
|
||||||
|
}
|
||||||
|
static parseAndValidate(raw: any): any {
|
||||||
|
/* Static methods can't access class type parameters */
|
||||||
|
throw new Error('Method not Implemented.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ProviderConstructor<CONFIG> = {
|
||||||
|
new (id: string, name: string, config: CONFIG): BaseModelProvider<CONFIG>;
|
||||||
|
parseAndValidate(raw: any): CONFIG;
|
||||||
|
getProviderConfigFields: () => UIConfigField[];
|
||||||
|
getProviderMetadata: () => ProviderMetadata;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const createProviderInstance = <P extends ProviderConstructor<any>>(
|
||||||
|
Provider: P,
|
||||||
|
id: string,
|
||||||
|
name: string,
|
||||||
|
rawConfig: unknown,
|
||||||
|
): InstanceType<P> => {
|
||||||
|
const cfg = Provider.parseAndValidate(rawConfig);
|
||||||
|
return new Provider(id, name, cfg) as InstanceType<P>;
|
||||||
|
};
|
||||||
|
|
||||||
|
export default BaseModelProvider;
|
||||||
21
src/lib/models/providers/index.ts
Normal file
21
src/lib/models/providers/index.ts
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import { ModelProviderUISection } from '@/lib/config/types';
|
||||||
|
import { ProviderConstructor } from './baseProvider';
|
||||||
|
import OpenAIProvider from './openai';
|
||||||
|
|
||||||
|
export const providers: Record<string, ProviderConstructor<any>> = {
|
||||||
|
openai: OpenAIProvider,
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getModelProvidersUIConfigSection =
|
||||||
|
(): ModelProviderUISection[] => {
|
||||||
|
return Object.entries(providers).map(([k, p]) => {
|
||||||
|
const configFields = p.getProviderConfigFields();
|
||||||
|
const metadata = p.getProviderMetadata();
|
||||||
|
|
||||||
|
return {
|
||||||
|
fields: configFields,
|
||||||
|
key: k,
|
||||||
|
name: metadata.name,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
};
|
||||||
214
src/lib/models/providers/openai.ts
Normal file
214
src/lib/models/providers/openai.ts
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||||
|
import { Model, ModelList, ProviderMetadata } from '../types';
|
||||||
|
import BaseModelProvider from './baseProvider';
|
||||||
|
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
||||||
|
import { Embeddings } from '@langchain/core/embeddings';
|
||||||
|
import { UIConfigField } from '@/lib/config/types';
|
||||||
|
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
|
||||||
|
|
||||||
|
interface OpenAIConfig {
|
||||||
|
apiKey: string;
|
||||||
|
baseURL: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultChatModels: Model[] = [
|
||||||
|
{
|
||||||
|
name: 'GPT-3.5 Turbo',
|
||||||
|
key: 'gpt-3.5-turbo',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT-4',
|
||||||
|
key: 'gpt-4',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT-4 turbo',
|
||||||
|
key: 'gpt-4-turbo',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT-4 omni',
|
||||||
|
key: 'gpt-4o',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT-4o (2024-05-13)',
|
||||||
|
key: 'gpt-4o-2024-05-13',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT-4 omni mini',
|
||||||
|
key: 'gpt-4o-mini',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 4.1 nano',
|
||||||
|
key: 'gpt-4.1-nano',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 4.1 mini',
|
||||||
|
key: 'gpt-4.1-mini',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 4.1',
|
||||||
|
key: 'gpt-4.1',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 5 nano',
|
||||||
|
key: 'gpt-5-nano',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 5',
|
||||||
|
key: 'gpt-5',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'GPT 5 Mini',
|
||||||
|
key: 'gpt-5-mini',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'o1',
|
||||||
|
key: 'o1',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'o3',
|
||||||
|
key: 'o3',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'o3 Mini',
|
||||||
|
key: 'o3-mini',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'o4 Mini',
|
||||||
|
key: 'o4-mini',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const defaultEmbeddingModels: Model[] = [
|
||||||
|
{
|
||||||
|
name: 'Text Embedding 3 Small',
|
||||||
|
key: 'text-embedding-3-small',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Text Embedding 3 Large',
|
||||||
|
key: 'text-embedding-3-large',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const providerConfigFields: UIConfigField[] = [
|
||||||
|
{
|
||||||
|
type: 'password',
|
||||||
|
name: 'API Key',
|
||||||
|
key: 'apiKey',
|
||||||
|
description: 'Your OpenAI API key',
|
||||||
|
required: true,
|
||||||
|
placeholder: 'OpenAI API Key',
|
||||||
|
env: 'OPENAI_API_KEY',
|
||||||
|
scope: 'server',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'string',
|
||||||
|
name: 'Base URL',
|
||||||
|
key: 'baseURL',
|
||||||
|
description: 'The base URL for the OpenAI API',
|
||||||
|
required: true,
|
||||||
|
placeholder: 'OpenAI Base URL',
|
||||||
|
default: 'https://api.openai.com/v1',
|
||||||
|
env: 'OPENAI_BASE_URL',
|
||||||
|
scope: 'server',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
|
||||||
|
constructor(id: string, name: string, config: OpenAIConfig) {
|
||||||
|
super(id, name, config);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDefaultModels(): Promise<ModelList> {
|
||||||
|
if (this.config.baseURL === 'https://api.openai.com/v1') {
|
||||||
|
return {
|
||||||
|
embedding: defaultEmbeddingModels,
|
||||||
|
chat: defaultChatModels,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
embedding: [],
|
||||||
|
chat: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async getModelList(): Promise<ModelList> {
|
||||||
|
const defaultModels = await this.getDefaultModels();
|
||||||
|
const configProvider = getConfiguredModelProviderById(this.id)!;
|
||||||
|
|
||||||
|
return {
|
||||||
|
embedding: [
|
||||||
|
...defaultModels.embedding,
|
||||||
|
...configProvider.embeddingModels,
|
||||||
|
],
|
||||||
|
chat: [...defaultModels.chat, ...configProvider.chatModels],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadChatModel(key: string): Promise<BaseChatModel> {
|
||||||
|
const modelList = await this.getModelList();
|
||||||
|
|
||||||
|
const exists = modelList.chat.find((m) => m.key === key);
|
||||||
|
|
||||||
|
if (!exists) {
|
||||||
|
throw new Error(
|
||||||
|
'Error Loading OpenAI Chat Model. Invalid Model Selected',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new ChatOpenAI({
|
||||||
|
apiKey: this.config.apiKey,
|
||||||
|
temperature: 0.7,
|
||||||
|
model: key,
|
||||||
|
configuration: {
|
||||||
|
baseURL: this.config.baseURL,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadEmbeddingModel(key: string): Promise<Embeddings> {
|
||||||
|
const modelList = await this.getModelList();
|
||||||
|
const exists = modelList.embedding.find((m) => m.key === key);
|
||||||
|
|
||||||
|
if (!exists) {
|
||||||
|
throw new Error(
|
||||||
|
'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new OpenAIEmbeddings({
|
||||||
|
apiKey: this.config.apiKey,
|
||||||
|
model: key,
|
||||||
|
configuration: {
|
||||||
|
baseURL: this.config.baseURL,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
static parseAndValidate(raw: any): OpenAIConfig {
|
||||||
|
if (!raw || typeof raw !== 'object')
|
||||||
|
throw new Error('Invalid config provided. Expected object');
|
||||||
|
if (!raw.apiKey || !raw.baseURL)
|
||||||
|
throw new Error(
|
||||||
|
'Invalid config provided. API key and base URL must be provided',
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
apiKey: String(raw.apiKey),
|
||||||
|
baseURL: String(raw.baseURL),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
static getProviderConfigFields(): UIConfigField[] {
|
||||||
|
return providerConfigFields;
|
||||||
|
}
|
||||||
|
|
||||||
|
static getProviderMetadata(): ProviderMetadata {
|
||||||
|
return {
|
||||||
|
key: 'openai',
|
||||||
|
name: 'OpenAI',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default OpenAIProvider;
|
||||||
58
src/lib/models/registry.ts
Normal file
58
src/lib/models/registry.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import { ConfigModelProvider } from '../config/types';
|
||||||
|
import BaseModelProvider, {
|
||||||
|
createProviderInstance,
|
||||||
|
} from './providers/baseProvider';
|
||||||
|
import { getConfiguredModelProviders } from '../config/serverRegistry';
|
||||||
|
import { providers } from './providers';
|
||||||
|
import { MinimalProvider, Model } from './types';
|
||||||
|
|
||||||
|
class ModelRegistry {
|
||||||
|
activeProviders: (ConfigModelProvider & {
|
||||||
|
provider: BaseModelProvider<any>;
|
||||||
|
})[] = [];
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.initializeActiveProviders();
|
||||||
|
}
|
||||||
|
|
||||||
|
private initializeActiveProviders() {
|
||||||
|
const configuredProviders = getConfiguredModelProviders();
|
||||||
|
|
||||||
|
configuredProviders.forEach((p) => {
|
||||||
|
try {
|
||||||
|
const provider = providers[p.type];
|
||||||
|
if (!provider) throw new Error('Invalid provider type');
|
||||||
|
|
||||||
|
this.activeProviders.push({
|
||||||
|
...p,
|
||||||
|
provider: createProviderInstance(provider, p.id, p.name, p.config),
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error(
|
||||||
|
`Failed to initialize provider. Type: ${p.type}, ID: ${p.id}, Config: ${JSON.stringify(p.config)}, Error: ${err}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async getActiveProviders() {
|
||||||
|
const providers: MinimalProvider[] = [];
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
this.activeProviders.map(async (p) => {
|
||||||
|
const m = await p.provider.getModelList();
|
||||||
|
|
||||||
|
providers.push({
|
||||||
|
id: p.id,
|
||||||
|
name: p.name,
|
||||||
|
chatModels: m.chat,
|
||||||
|
embeddingModels: m.embedding,
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
return providers;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default ModelRegistry;
|
||||||
23
src/lib/models/types.ts
Normal file
23
src/lib/models/types.ts
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
type Model = {
|
||||||
|
name: string;
|
||||||
|
key: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type ModelList = {
|
||||||
|
embedding: Model[];
|
||||||
|
chat: Model[];
|
||||||
|
};
|
||||||
|
|
||||||
|
type ProviderMetadata = {
|
||||||
|
name: string;
|
||||||
|
key: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type MinimalProvider = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
chatModels: Model[];
|
||||||
|
embeddingModels: Model[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type { Model, ModelList, ProviderMetadata, MinimalProvider };
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
|
||||||
import { getAimlApiKey } from '../config';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
import { Embeddings } from '@langchain/core/embeddings';
|
|
||||||
import axios from 'axios';
|
|
||||||
|
|
||||||
export const PROVIDER_INFO = {
|
|
||||||
key: 'aimlapi',
|
|
||||||
displayName: 'AI/ML API',
|
|
||||||
};
|
|
||||||
|
|
||||||
interface AimlApiModel {
|
|
||||||
id: string;
|
|
||||||
name?: string;
|
|
||||||
type?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
const API_URL = 'https://api.aimlapi.com';
|
|
||||||
|
|
||||||
export const loadAimlApiChatModels = async () => {
|
|
||||||
const apiKey = getAimlApiKey();
|
|
||||||
|
|
||||||
if (!apiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(`${API_URL}/models`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${apiKey}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
response.data.data.forEach((model: AimlApiModel) => {
|
|
||||||
if (model.type === 'chat-completion') {
|
|
||||||
chatModels[model.id] = {
|
|
||||||
displayName: model.name || model.id,
|
|
||||||
model: new ChatOpenAI({
|
|
||||||
apiKey: apiKey,
|
|
||||||
modelName: model.id,
|
|
||||||
temperature: 0.7,
|
|
||||||
configuration: {
|
|
||||||
baseURL: API_URL,
|
|
||||||
},
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading AI/ML API models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadAimlApiEmbeddingModels = async () => {
|
|
||||||
const apiKey = getAimlApiKey();
|
|
||||||
|
|
||||||
if (!apiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(`${API_URL}/models`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${apiKey}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
response.data.data.forEach((model: AimlApiModel) => {
|
|
||||||
if (model.type === 'embedding') {
|
|
||||||
embeddingModels[model.id] = {
|
|
||||||
displayName: model.name || model.id,
|
|
||||||
model: new OpenAIEmbeddings({
|
|
||||||
apiKey: apiKey,
|
|
||||||
modelName: model.id,
|
|
||||||
configuration: {
|
|
||||||
baseURL: API_URL,
|
|
||||||
},
|
|
||||||
}) as unknown as Embeddings,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading AI/ML API embeddings models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
import { ChatAnthropic } from '@langchain/anthropic';
|
|
||||||
import { ChatModel } from '.';
|
|
||||||
import { getAnthropicApiKey } from '../config';
|
|
||||||
|
|
||||||
export const PROVIDER_INFO = {
|
|
||||||
key: 'anthropic',
|
|
||||||
displayName: 'Anthropic',
|
|
||||||
};
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
|
|
||||||
const anthropicChatModels: Record<string, string>[] = [
|
|
||||||
{
|
|
||||||
displayName: 'Claude 4.1 Opus',
|
|
||||||
key: 'claude-opus-4-1-20250805',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 4 Opus',
|
|
||||||
key: 'claude-opus-4-20250514',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 4 Sonnet',
|
|
||||||
key: 'claude-sonnet-4-20250514',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3.7 Sonnet',
|
|
||||||
key: 'claude-3-7-sonnet-20250219',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3.5 Haiku',
|
|
||||||
key: 'claude-3-5-haiku-20241022',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3.5 Sonnet v2',
|
|
||||||
key: 'claude-3-5-sonnet-20241022',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3.5 Sonnet',
|
|
||||||
key: 'claude-3-5-sonnet-20240620',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3 Opus',
|
|
||||||
key: 'claude-3-opus-20240229',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3 Sonnet',
|
|
||||||
key: 'claude-3-sonnet-20240229',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Claude 3 Haiku',
|
|
||||||
key: 'claude-3-haiku-20240307',
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
export const loadAnthropicChatModels = async () => {
|
|
||||||
const anthropicApiKey = getAnthropicApiKey();
|
|
||||||
|
|
||||||
if (!anthropicApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
anthropicChatModels.forEach((model) => {
|
|
||||||
chatModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new ChatAnthropic({
|
|
||||||
apiKey: anthropicApiKey,
|
|
||||||
modelName: model.key,
|
|
||||||
temperature: 0.7,
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Anthropic models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
import { ChatOpenAI } from '@langchain/openai';
|
|
||||||
import { getDeepseekApiKey } from '../config';
|
|
||||||
import { ChatModel } from '.';
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
|
|
||||||
export const PROVIDER_INFO = {
|
|
||||||
key: 'deepseek',
|
|
||||||
displayName: 'Deepseek AI',
|
|
||||||
};
|
|
||||||
|
|
||||||
const deepseekChatModels: Record<string, string>[] = [
|
|
||||||
{
|
|
||||||
displayName: 'Deepseek Chat (Deepseek V3)',
|
|
||||||
key: 'deepseek-chat',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Deepseek Reasoner (Deepseek R1)',
|
|
||||||
key: 'deepseek-reasoner',
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
export const loadDeepseekChatModels = async () => {
|
|
||||||
const deepseekApiKey = getDeepseekApiKey();
|
|
||||||
|
|
||||||
if (!deepseekApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
deepseekChatModels.forEach((model) => {
|
|
||||||
chatModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new ChatOpenAI({
|
|
||||||
apiKey: deepseekApiKey,
|
|
||||||
modelName: model.key,
|
|
||||||
temperature: 0.7,
|
|
||||||
configuration: {
|
|
||||||
baseURL: 'https://api.deepseek.com',
|
|
||||||
},
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Deepseek models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
import {
|
|
||||||
ChatGoogleGenerativeAI,
|
|
||||||
GoogleGenerativeAIEmbeddings,
|
|
||||||
} from '@langchain/google-genai';
|
|
||||||
import { getGeminiApiKey } from '../config';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
|
|
||||||
export const PROVIDER_INFO = {
|
|
||||||
key: 'gemini',
|
|
||||||
displayName: 'Google Gemini',
|
|
||||||
};
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
import { Embeddings } from '@langchain/core/embeddings';
|
|
||||||
|
|
||||||
const geminiChatModels: Record<string, string>[] = [
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.5 Flash',
|
|
||||||
key: 'gemini-2.5-flash',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.5 Flash-Lite',
|
|
||||||
key: 'gemini-2.5-flash-lite',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.5 Pro',
|
|
||||||
key: 'gemini-2.5-pro',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.0 Flash',
|
|
||||||
key: 'gemini-2.0-flash',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.0 Flash-Lite',
|
|
||||||
key: 'gemini-2.0-flash-lite',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 2.0 Flash Thinking Experimental',
|
|
||||||
key: 'gemini-2.0-flash-thinking-exp-01-21',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 1.5 Flash',
|
|
||||||
key: 'gemini-1.5-flash',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 1.5 Flash-8B',
|
|
||||||
key: 'gemini-1.5-flash-8b',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Gemini 1.5 Pro',
|
|
||||||
key: 'gemini-1.5-pro',
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const geminiEmbeddingModels: Record<string, string>[] = [
|
|
||||||
{
|
|
||||||
displayName: 'Text Embedding 004',
|
|
||||||
key: 'models/text-embedding-004',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
displayName: 'Embedding 001',
|
|
||||||
key: 'models/embedding-001',
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
export const loadGeminiChatModels = async () => {
|
|
||||||
const geminiApiKey = getGeminiApiKey();
|
|
||||||
|
|
||||||
if (!geminiApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
geminiChatModels.forEach((model) => {
|
|
||||||
chatModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new ChatGoogleGenerativeAI({
|
|
||||||
apiKey: geminiApiKey,
|
|
||||||
model: model.key,
|
|
||||||
temperature: 0.7,
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Gemini models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadGeminiEmbeddingModels = async () => {
|
|
||||||
const geminiApiKey = getGeminiApiKey();
|
|
||||||
|
|
||||||
if (!geminiApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
geminiEmbeddingModels.forEach((model) => {
|
|
||||||
embeddingModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new GoogleGenerativeAIEmbeddings({
|
|
||||||
apiKey: geminiApiKey,
|
|
||||||
modelName: model.key,
|
|
||||||
}) as unknown as Embeddings,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Gemini embeddings models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
import { ChatGroq } from '@langchain/groq';
|
|
||||||
import { getGroqApiKey } from '../config';
|
|
||||||
import { ChatModel } from '.';
|
|
||||||
|
|
||||||
export const PROVIDER_INFO = {
|
|
||||||
key: 'groq',
|
|
||||||
displayName: 'Groq',
|
|
||||||
};
|
|
||||||
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
|
|
||||||
export const loadGroqChatModels = async () => {
|
|
||||||
const groqApiKey = getGroqApiKey();
|
|
||||||
if (!groqApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await fetch('https://api.groq.com/openai/v1/models', {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Authorization: `bearer ${groqApiKey}`,
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const groqChatModels = (await res.json()).data;
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
groqChatModels.forEach((model: any) => {
|
|
||||||
chatModels[model.id] = {
|
|
||||||
displayName: model.id,
|
|
||||||
model: new ChatGroq({
|
|
||||||
apiKey: groqApiKey,
|
|
||||||
model: model.id,
|
|
||||||
temperature: 0.7,
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Groq models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,170 +0,0 @@
|
|||||||
import { Embeddings } from '@langchain/core/embeddings';
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
import {
|
|
||||||
loadOpenAIChatModels,
|
|
||||||
loadOpenAIEmbeddingModels,
|
|
||||||
PROVIDER_INFO as OpenAIInfo,
|
|
||||||
PROVIDER_INFO,
|
|
||||||
} from './openai';
|
|
||||||
import {
|
|
||||||
getCustomOpenaiApiKey,
|
|
||||||
getCustomOpenaiApiUrl,
|
|
||||||
getCustomOpenaiModelName,
|
|
||||||
} from '../config';
|
|
||||||
import { ChatOpenAI } from '@langchain/openai';
|
|
||||||
import {
|
|
||||||
loadOllamaChatModels,
|
|
||||||
loadOllamaEmbeddingModels,
|
|
||||||
PROVIDER_INFO as OllamaInfo,
|
|
||||||
} from './ollama';
|
|
||||||
import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
|
|
||||||
import {
|
|
||||||
loadAnthropicChatModels,
|
|
||||||
PROVIDER_INFO as AnthropicInfo,
|
|
||||||
} from './anthropic';
|
|
||||||
import {
|
|
||||||
loadGeminiChatModels,
|
|
||||||
loadGeminiEmbeddingModels,
|
|
||||||
PROVIDER_INFO as GeminiInfo,
|
|
||||||
} from './gemini';
|
|
||||||
import {
|
|
||||||
loadTransformersEmbeddingsModels,
|
|
||||||
PROVIDER_INFO as TransformersInfo,
|
|
||||||
} from './transformers';
|
|
||||||
import {
|
|
||||||
loadDeepseekChatModels,
|
|
||||||
PROVIDER_INFO as DeepseekInfo,
|
|
||||||
} from './deepseek';
|
|
||||||
import {
|
|
||||||
loadAimlApiChatModels,
|
|
||||||
loadAimlApiEmbeddingModels,
|
|
||||||
PROVIDER_INFO as AimlApiInfo,
|
|
||||||
} from './aimlapi';
|
|
||||||
import {
|
|
||||||
loadLMStudioChatModels,
|
|
||||||
loadLMStudioEmbeddingsModels,
|
|
||||||
PROVIDER_INFO as LMStudioInfo,
|
|
||||||
} from './lmstudio';
|
|
||||||
import {
|
|
||||||
loadLemonadeChatModels,
|
|
||||||
loadLemonadeEmbeddingModels,
|
|
||||||
PROVIDER_INFO as LemonadeInfo,
|
|
||||||
} from './lemonade';
|
|
||||||
|
|
||||||
export const PROVIDER_METADATA = {
|
|
||||||
openai: OpenAIInfo,
|
|
||||||
ollama: OllamaInfo,
|
|
||||||
groq: GroqInfo,
|
|
||||||
anthropic: AnthropicInfo,
|
|
||||||
gemini: GeminiInfo,
|
|
||||||
transformers: TransformersInfo,
|
|
||||||
deepseek: DeepseekInfo,
|
|
||||||
aimlapi: AimlApiInfo,
|
|
||||||
lmstudio: LMStudioInfo,
|
|
||||||
lemonade: LemonadeInfo,
|
|
||||||
custom_openai: {
|
|
||||||
key: 'custom_openai',
|
|
||||||
displayName: 'Custom OpenAI',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
export interface ChatModel {
|
|
||||||
displayName: string;
|
|
||||||
model: BaseChatModel;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface EmbeddingModel {
|
|
||||||
displayName: string;
|
|
||||||
model: Embeddings;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const chatModelProviders: Record<
|
|
||||||
string,
|
|
||||||
() => Promise<Record<string, ChatModel>>
|
|
||||||
> = {
|
|
||||||
openai: loadOpenAIChatModels,
|
|
||||||
ollama: loadOllamaChatModels,
|
|
||||||
groq: loadGroqChatModels,
|
|
||||||
anthropic: loadAnthropicChatModels,
|
|
||||||
gemini: loadGeminiChatModels,
|
|
||||||
deepseek: loadDeepseekChatModels,
|
|
||||||
aimlapi: loadAimlApiChatModels,
|
|
||||||
lmstudio: loadLMStudioChatModels,
|
|
||||||
lemonade: loadLemonadeChatModels,
|
|
||||||
};
|
|
||||||
|
|
||||||
export const embeddingModelProviders: Record<
|
|
||||||
string,
|
|
||||||
() => Promise<Record<string, EmbeddingModel>>
|
|
||||||
> = {
|
|
||||||
openai: loadOpenAIEmbeddingModels,
|
|
||||||
ollama: loadOllamaEmbeddingModels,
|
|
||||||
gemini: loadGeminiEmbeddingModels,
|
|
||||||
transformers: loadTransformersEmbeddingsModels,
|
|
||||||
aimlapi: loadAimlApiEmbeddingModels,
|
|
||||||
lmstudio: loadLMStudioEmbeddingsModels,
|
|
||||||
lemonade: loadLemonadeEmbeddingModels,
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getAvailableChatModelProviders = async () => {
|
|
||||||
const models: Record<string, Record<string, ChatModel>> = {};
|
|
||||||
|
|
||||||
for (const provider in chatModelProviders) {
|
|
||||||
const providerModels = await chatModelProviders[provider]();
|
|
||||||
if (Object.keys(providerModels).length > 0) {
|
|
||||||
models[provider] = providerModels;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const customOpenAiApiKey = getCustomOpenaiApiKey();
|
|
||||||
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
|
|
||||||
const customOpenAiModelName = getCustomOpenaiModelName();
|
|
||||||
|
|
||||||
models['custom_openai'] = {
|
|
||||||
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
|
|
||||||
? {
|
|
||||||
[customOpenAiModelName]: {
|
|
||||||
displayName: customOpenAiModelName,
|
|
||||||
model: new ChatOpenAI({
|
|
||||||
apiKey: customOpenAiApiKey,
|
|
||||||
modelName: customOpenAiModelName,
|
|
||||||
...(() => {
|
|
||||||
const temperatureRestrictedModels = [
|
|
||||||
'gpt-5-nano',
|
|
||||||
'gpt-5',
|
|
||||||
'gpt-5-mini',
|
|
||||||
'o1',
|
|
||||||
'o3',
|
|
||||||
'o3-mini',
|
|
||||||
'o4-mini',
|
|
||||||
];
|
|
||||||
const isTemperatureRestricted =
|
|
||||||
temperatureRestrictedModels.some((restrictedModel) =>
|
|
||||||
customOpenAiModelName.includes(restrictedModel),
|
|
||||||
);
|
|
||||||
return isTemperatureRestricted ? {} : { temperature: 0.7 };
|
|
||||||
})(),
|
|
||||||
configuration: {
|
|
||||||
baseURL: customOpenAiApiUrl,
|
|
||||||
},
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
: {}),
|
|
||||||
};
|
|
||||||
|
|
||||||
return models;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getAvailableEmbeddingModelProviders = async () => {
|
|
||||||
const models: Record<string, Record<string, EmbeddingModel>> = {};
|
|
||||||
|
|
||||||
for (const provider in embeddingModelProviders) {
|
|
||||||
const providerModels = await embeddingModelProviders[provider]();
|
|
||||||
if (Object.keys(providerModels).length > 0) {
|
|
||||||
models[provider] = providerModels;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return models;
|
|
||||||
};
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
|
|
||||||
// Provider registry metadata: machine-readable key and UI display name.
export const PROVIDER_INFO = {
  key: 'lemonade',
  displayName: 'Lemonade',
};
|
|
||||||
|
|
||||||
import { ChatOpenAI } from '@langchain/openai';
|
|
||||||
import { OpenAIEmbeddings } from '@langchain/openai';
|
|
||||||
|
|
||||||
export const loadLemonadeChatModels = async () => {
|
|
||||||
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
|
|
||||||
const lemonadeApiKey = getLemonadeApiKey();
|
|
||||||
|
|
||||||
if (!lemonadeApiEndpoint) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
...(lemonadeApiKey
|
|
||||||
? { Authorization: `Bearer ${lemonadeApiKey}` }
|
|
||||||
: {}),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const { data: models } = res.data;
|
|
||||||
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
models.forEach((model: any) => {
|
|
||||||
chatModels[model.id] = {
|
|
||||||
displayName: model.id,
|
|
||||||
model: new ChatOpenAI({
|
|
||||||
apiKey: lemonadeApiKey || 'lemonade-key',
|
|
||||||
modelName: model.id,
|
|
||||||
temperature: 0.7,
|
|
||||||
configuration: {
|
|
||||||
baseURL: `${lemonadeApiEndpoint}/api/v1`,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Lemonade models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadLemonadeEmbeddingModels = async () => {
|
|
||||||
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
|
|
||||||
const lemonadeApiKey = getLemonadeApiKey();
|
|
||||||
|
|
||||||
if (!lemonadeApiEndpoint) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
...(lemonadeApiKey
|
|
||||||
? { Authorization: `Bearer ${lemonadeApiKey}` }
|
|
||||||
: {}),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const { data: models } = res.data;
|
|
||||||
|
|
||||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
// Filter models that support embeddings (if Lemonade provides this info)
|
|
||||||
// For now, we'll assume all models can be used for embeddings
|
|
||||||
models.forEach((model: any) => {
|
|
||||||
embeddingModels[model.id] = {
|
|
||||||
displayName: model.id,
|
|
||||||
model: new OpenAIEmbeddings({
|
|
||||||
apiKey: lemonadeApiKey || 'lemonade-key',
|
|
||||||
modelName: model.id,
|
|
||||||
configuration: {
|
|
||||||
baseURL: `${lemonadeApiEndpoint}/api/v1`,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Lemonade embedding models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,100 +0,0 @@
|
|||||||
import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
|
|
||||||
import axios from 'axios';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
|
|
||||||
// Provider registry metadata: machine-readable key and UI display name.
export const PROVIDER_INFO = {
  key: 'lmstudio',
  displayName: 'LM Studio',
};
|
|
||||||
import { ChatOpenAI } from '@langchain/openai';
|
|
||||||
import { OpenAIEmbeddings } from '@langchain/openai';
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
import { Embeddings } from '@langchain/core/embeddings';
|
|
||||||
|
|
||||||
// Shape of a single entry in LM Studio's /v1/models listing; `name` is an
// optional human-friendly label, falling back to `id` when absent.
interface LMStudioModel {
  id: string;
  name?: string;
}
|
|
||||||
|
|
||||||
const ensureV1Endpoint = (endpoint: string): string =>
|
|
||||||
endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`;
|
|
||||||
|
|
||||||
const checkServerAvailability = async (endpoint: string): Promise<boolean> => {
|
|
||||||
try {
|
|
||||||
await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
});
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadLMStudioChatModels = async () => {
|
|
||||||
const endpoint = getLMStudioApiEndpoint();
|
|
||||||
|
|
||||||
if (!endpoint) return {};
|
|
||||||
if (!(await checkServerAvailability(endpoint))) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
});
|
|
||||||
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
response.data.data.forEach((model: LMStudioModel) => {
|
|
||||||
chatModels[model.id] = {
|
|
||||||
displayName: model.name || model.id,
|
|
||||||
model: new ChatOpenAI({
|
|
||||||
apiKey: 'lm-studio',
|
|
||||||
configuration: {
|
|
||||||
baseURL: ensureV1Endpoint(endpoint),
|
|
||||||
},
|
|
||||||
modelName: model.id,
|
|
||||||
temperature: 0.7,
|
|
||||||
streaming: true,
|
|
||||||
maxRetries: 3,
|
|
||||||
}) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading LM Studio models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadLMStudioEmbeddingsModels = async () => {
|
|
||||||
const endpoint = getLMStudioApiEndpoint();
|
|
||||||
|
|
||||||
if (!endpoint) return {};
|
|
||||||
if (!(await checkServerAvailability(endpoint))) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, {
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
});
|
|
||||||
|
|
||||||
const embeddingsModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
response.data.data.forEach((model: LMStudioModel) => {
|
|
||||||
embeddingsModels[model.id] = {
|
|
||||||
displayName: model.name || model.id,
|
|
||||||
model: new OpenAIEmbeddings({
|
|
||||||
apiKey: 'lm-studio',
|
|
||||||
configuration: {
|
|
||||||
baseURL: ensureV1Endpoint(endpoint),
|
|
||||||
},
|
|
||||||
modelName: model.id,
|
|
||||||
}) as unknown as Embeddings,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingsModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading LM Studio embeddings model: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,86 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
|
|
||||||
// Provider registry metadata: machine-readable key and UI display name.
export const PROVIDER_INFO = {
  key: 'ollama',
  displayName: 'Ollama',
};
|
|
||||||
import { ChatOllama } from '@langchain/ollama';
|
|
||||||
import { OllamaEmbeddings } from '@langchain/ollama';
|
|
||||||
|
|
||||||
export const loadOllamaChatModels = async () => {
|
|
||||||
const ollamaApiEndpoint = getOllamaApiEndpoint();
|
|
||||||
const ollamaApiKey = getOllamaApiKey();
|
|
||||||
|
|
||||||
if (!ollamaApiEndpoint) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const { models } = res.data;
|
|
||||||
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
models.forEach((model: any) => {
|
|
||||||
chatModels[model.model] = {
|
|
||||||
displayName: model.name,
|
|
||||||
model: new ChatOllama({
|
|
||||||
baseUrl: ollamaApiEndpoint,
|
|
||||||
model: model.model,
|
|
||||||
temperature: 0.7,
|
|
||||||
keepAlive: getKeepAlive(),
|
|
||||||
...(ollamaApiKey
|
|
||||||
? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
|
|
||||||
: {}),
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Ollama models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadOllamaEmbeddingModels = async () => {
|
|
||||||
const ollamaApiEndpoint = getOllamaApiEndpoint();
|
|
||||||
const ollamaApiKey = getOllamaApiKey();
|
|
||||||
|
|
||||||
if (!ollamaApiEndpoint) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, {
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const { models } = res.data;
|
|
||||||
|
|
||||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
models.forEach((model: any) => {
|
|
||||||
embeddingModels[model.model] = {
|
|
||||||
displayName: model.name,
|
|
||||||
model: new OllamaEmbeddings({
|
|
||||||
baseUrl: ollamaApiEndpoint,
|
|
||||||
model: model.model,
|
|
||||||
...(ollamaApiKey
|
|
||||||
? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
|
|
||||||
: {}),
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Ollama embeddings models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
|
||||||
import { getOpenaiApiKey } from '../config';
|
|
||||||
import { ChatModel, EmbeddingModel } from '.';
|
|
||||||
|
|
||||||
// Provider registry metadata: machine-readable key and UI display name.
export const PROVIDER_INFO = {
  key: 'openai',
  displayName: 'OpenAI',
};
|
|
||||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
|
||||||
import { Embeddings } from '@langchain/core/embeddings';
|
|
||||||
|
|
||||||
// Static catalogue of OpenAI chat models offered to the user; `key` is the
// API model name, `displayName` is the UI label.
const openaiChatModels: Record<string, string>[] = [
  {
    displayName: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    displayName: 'GPT-4',
    key: 'gpt-4',
  },
  {
    displayName: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    displayName: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    displayName: 'GPT-4o (2024-05-13)',
    key: 'gpt-4o-2024-05-13',
  },
  {
    displayName: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
  {
    displayName: 'GPT 4.1 nano',
    key: 'gpt-4.1-nano',
  },
  {
    displayName: 'GPT 4.1 mini',
    key: 'gpt-4.1-mini',
  },
  {
    displayName: 'GPT 4.1',
    key: 'gpt-4.1',
  },
  {
    displayName: 'GPT 5 nano',
    key: 'gpt-5-nano',
  },
  {
    displayName: 'GPT 5',
    key: 'gpt-5',
  },
  {
    displayName: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    displayName: 'o1',
    key: 'o1',
  },
  {
    displayName: 'o3',
    key: 'o3',
  },
  {
    displayName: 'o3 Mini',
    key: 'o3-mini',
  },
  {
    displayName: 'o4 Mini',
    key: 'o4-mini',
  },
];
|
|
||||||
|
|
||||||
// Static catalogue of OpenAI embedding models; `key` is the API model name,
// `displayName` is the UI label.
const openaiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    displayName: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];
|
|
||||||
|
|
||||||
export const loadOpenAIChatModels = async () => {
|
|
||||||
const openaiApiKey = getOpenaiApiKey();
|
|
||||||
|
|
||||||
if (!openaiApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const chatModels: Record<string, ChatModel> = {};
|
|
||||||
|
|
||||||
openaiChatModels.forEach((model) => {
|
|
||||||
// Models that only support temperature = 1
|
|
||||||
const temperatureRestrictedModels = [
|
|
||||||
'gpt-5-nano',
|
|
||||||
'gpt-5',
|
|
||||||
'gpt-5-mini',
|
|
||||||
'o1',
|
|
||||||
'o3',
|
|
||||||
'o3-mini',
|
|
||||||
'o4-mini',
|
|
||||||
];
|
|
||||||
const isTemperatureRestricted = temperatureRestrictedModels.some(
|
|
||||||
(restrictedModel) => model.key.includes(restrictedModel),
|
|
||||||
);
|
|
||||||
|
|
||||||
const modelConfig: any = {
|
|
||||||
apiKey: openaiApiKey,
|
|
||||||
modelName: model.key,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Only add temperature if the model supports it
|
|
||||||
if (!isTemperatureRestricted) {
|
|
||||||
modelConfig.temperature = 0.7;
|
|
||||||
}
|
|
||||||
|
|
||||||
chatModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return chatModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading OpenAI models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const loadOpenAIEmbeddingModels = async () => {
|
|
||||||
const openaiApiKey = getOpenaiApiKey();
|
|
||||||
|
|
||||||
if (!openaiApiKey) return {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const embeddingModels: Record<string, EmbeddingModel> = {};
|
|
||||||
|
|
||||||
openaiEmbeddingModels.forEach((model) => {
|
|
||||||
embeddingModels[model.key] = {
|
|
||||||
displayName: model.displayName,
|
|
||||||
model: new OpenAIEmbeddings({
|
|
||||||
apiKey: openaiApiKey,
|
|
||||||
modelName: model.key,
|
|
||||||
}) as unknown as Embeddings,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading OpenAI embeddings models: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
|
|
||||||
|
|
||||||
// Provider registry metadata: machine-readable key and UI display name.
export const PROVIDER_INFO = {
  key: 'transformers',
  displayName: 'Hugging Face',
};
|
|
||||||
|
|
||||||
export const loadTransformersEmbeddingsModels = async () => {
|
|
||||||
try {
|
|
||||||
const embeddingModels = {
|
|
||||||
'xenova-bge-small-en-v1.5': {
|
|
||||||
displayName: 'BGE Small',
|
|
||||||
model: new HuggingFaceTransformersEmbeddings({
|
|
||||||
modelName: 'Xenova/bge-small-en-v1.5',
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
'xenova-gte-small': {
|
|
||||||
displayName: 'GTE Small',
|
|
||||||
model: new HuggingFaceTransformersEmbeddings({
|
|
||||||
modelName: 'Xenova/gte-small',
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
'xenova-bert-base-multilingual-uncased': {
|
|
||||||
displayName: 'Bert Multilingual',
|
|
||||||
model: new HuggingFaceTransformersEmbeddings({
|
|
||||||
modelName: 'Xenova/bert-base-multilingual-uncased',
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
return embeddingModels;
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Error loading Transformers embeddings model: ${err}`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import axios from 'axios';
|
import axios from 'axios';
|
||||||
import { getSearxngApiEndpoint } from './config';
|
import { getSearxngURL } from './config/serverRegistry';
|
||||||
|
|
||||||
interface SearxngSearchOptions {
|
interface SearxngSearchOptions {
|
||||||
categories?: string[];
|
categories?: string[];
|
||||||
@@ -23,7 +23,7 @@ export const searchSearxng = async (
|
|||||||
query: string,
|
query: string,
|
||||||
opts?: SearxngSearchOptions,
|
opts?: SearxngSearchOptions,
|
||||||
) => {
|
) => {
|
||||||
const searxngURL = getSearxngApiEndpoint();
|
const searxngURL = getSearxngURL();
|
||||||
|
|
||||||
const url = new URL(`${searxngURL}/search?format=json`);
|
const url = new URL(`${searxngURL}/search?format=json`);
|
||||||
url.searchParams.append('q', query);
|
url.searchParams.append('q', query);
|
||||||
|
|||||||
7
src/lib/serverUtils.ts
Normal file
7
src/lib/serverUtils.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import crypto from 'crypto';
|
||||||
|
|
||||||
|
export const hashObj = (obj: { [key: string]: any }) => {
|
||||||
|
const json = JSON.stringify(obj, Object.keys(obj).sort());
|
||||||
|
const hash = crypto.createHash('sha256').update(json).digest('hex');
|
||||||
|
return hash;
|
||||||
|
};
|
||||||
@@ -49,6 +49,9 @@ const config: Config = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
plugins: [require('@tailwindcss/typography')],
|
plugins: [
|
||||||
|
require('@tailwindcss/typography'),
|
||||||
|
require('@headlessui/tailwindcss')({ prefix: 'headless' }),
|
||||||
|
],
|
||||||
};
|
};
|
||||||
export default config;
|
export default config;
|
||||||
|
|||||||
103
yarn.lock
103
yarn.lock
@@ -39,6 +39,13 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
|
resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
|
||||||
integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==
|
integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==
|
||||||
|
|
||||||
|
"@cspotcode/source-map-support@^0.8.0":
|
||||||
|
version "0.8.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
|
||||||
|
integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==
|
||||||
|
dependencies:
|
||||||
|
"@jridgewell/trace-mapping" "0.3.9"
|
||||||
|
|
||||||
"@dabh/diagnostics@^2.0.2":
|
"@dabh/diagnostics@^2.0.2":
|
||||||
version "2.0.3"
|
version "2.0.3"
|
||||||
resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
|
resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
|
||||||
@@ -407,6 +414,11 @@
|
|||||||
"@react-aria/interactions" "^3.21.3"
|
"@react-aria/interactions" "^3.21.3"
|
||||||
"@tanstack/react-virtual" "^3.8.1"
|
"@tanstack/react-virtual" "^3.8.1"
|
||||||
|
|
||||||
|
"@headlessui/tailwindcss@^0.2.2":
|
||||||
|
version "0.2.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.2.2.tgz#8ebde73fabca72d48636ea56ae790209dc5f0d49"
|
||||||
|
integrity sha512-xNe42KjdyA4kfUKLLPGzME9zkH7Q3rOZ5huFihWNWOQFxnItxPB3/67yBI8/qBfY8nwBRx5GHn4VprsoluVMGw==
|
||||||
|
|
||||||
"@huggingface/jinja@^0.2.2":
|
"@huggingface/jinja@^0.2.2":
|
||||||
version "0.2.2"
|
version "0.2.2"
|
||||||
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
|
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
|
||||||
@@ -575,7 +587,7 @@
|
|||||||
"@jridgewell/sourcemap-codec" "^1.4.10"
|
"@jridgewell/sourcemap-codec" "^1.4.10"
|
||||||
"@jridgewell/trace-mapping" "^0.3.24"
|
"@jridgewell/trace-mapping" "^0.3.24"
|
||||||
|
|
||||||
"@jridgewell/resolve-uri@^3.1.0":
|
"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0":
|
||||||
version "3.1.2"
|
version "3.1.2"
|
||||||
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
|
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
|
||||||
integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
|
integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
|
||||||
@@ -590,6 +602,14 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
|
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
|
||||||
integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==
|
integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==
|
||||||
|
|
||||||
|
"@jridgewell/trace-mapping@0.3.9":
|
||||||
|
version "0.3.9"
|
||||||
|
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
|
||||||
|
integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==
|
||||||
|
dependencies:
|
||||||
|
"@jridgewell/resolve-uri" "^3.0.3"
|
||||||
|
"@jridgewell/sourcemap-codec" "^1.4.10"
|
||||||
|
|
||||||
"@jridgewell/trace-mapping@^0.3.24":
|
"@jridgewell/trace-mapping@^0.3.24":
|
||||||
version "0.3.25"
|
version "0.3.25"
|
||||||
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
|
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
|
||||||
@@ -927,6 +947,26 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4"
|
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4"
|
||||||
integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==
|
integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw==
|
||||||
|
|
||||||
|
"@tsconfig/node10@^1.0.7":
|
||||||
|
version "1.0.11"
|
||||||
|
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2"
|
||||||
|
integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==
|
||||||
|
|
||||||
|
"@tsconfig/node12@^1.0.7":
|
||||||
|
version "1.0.11"
|
||||||
|
resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d"
|
||||||
|
integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==
|
||||||
|
|
||||||
|
"@tsconfig/node14@^1.0.0":
|
||||||
|
version "1.0.3"
|
||||||
|
resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1"
|
||||||
|
integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==
|
||||||
|
|
||||||
|
"@tsconfig/node16@^1.0.2":
|
||||||
|
version "1.0.4"
|
||||||
|
resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9"
|
||||||
|
integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==
|
||||||
|
|
||||||
"@types/better-sqlite3@^7.6.12":
|
"@types/better-sqlite3@^7.6.12":
|
||||||
version "7.6.12"
|
version "7.6.12"
|
||||||
resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7"
|
resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7"
|
||||||
@@ -1119,6 +1159,18 @@ acorn-jsx@^5.3.2:
|
|||||||
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
|
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
|
||||||
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
|
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
|
||||||
|
|
||||||
|
acorn-walk@^8.1.1:
|
||||||
|
version "8.3.4"
|
||||||
|
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7"
|
||||||
|
integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==
|
||||||
|
dependencies:
|
||||||
|
acorn "^8.11.0"
|
||||||
|
|
||||||
|
acorn@^8.11.0, acorn@^8.4.1:
|
||||||
|
version "8.15.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816"
|
||||||
|
integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==
|
||||||
|
|
||||||
acorn@^8.9.0:
|
acorn@^8.9.0:
|
||||||
version "8.11.3"
|
version "8.11.3"
|
||||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
|
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
|
||||||
@@ -1181,6 +1233,11 @@ anymatch@~3.1.2:
|
|||||||
normalize-path "^3.0.0"
|
normalize-path "^3.0.0"
|
||||||
picomatch "^2.0.4"
|
picomatch "^2.0.4"
|
||||||
|
|
||||||
|
arg@^4.1.0:
|
||||||
|
version "4.1.3"
|
||||||
|
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
|
||||||
|
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
|
||||||
|
|
||||||
arg@^5.0.2:
|
arg@^5.0.2:
|
||||||
version "5.0.2"
|
version "5.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
|
resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
|
||||||
@@ -1739,6 +1796,11 @@ core-util-is@~1.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
|
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
|
||||||
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
|
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
|
||||||
|
|
||||||
|
create-require@^1.1.0:
|
||||||
|
version "1.1.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
|
||||||
|
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
|
||||||
|
|
||||||
cross-fetch@^3.1.5:
|
cross-fetch@^3.1.5:
|
||||||
version "3.2.0"
|
version "3.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3"
|
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3"
|
||||||
@@ -1883,6 +1945,11 @@ didyoumean@^1.2.2:
|
|||||||
resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
|
resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
|
||||||
integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==
|
integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==
|
||||||
|
|
||||||
|
diff@^4.0.1:
|
||||||
|
version "4.0.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
|
||||||
|
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
|
||||||
|
|
||||||
dingbat-to-unicode@^1.0.1:
|
dingbat-to-unicode@^1.0.1:
|
||||||
version "1.0.1"
|
version "1.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83"
|
resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83"
|
||||||
@@ -3510,6 +3577,11 @@ lucide-react@^0.363.0:
|
|||||||
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
|
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
|
||||||
integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==
|
integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==
|
||||||
|
|
||||||
|
make-error@^1.1.1:
|
||||||
|
version "1.3.6"
|
||||||
|
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
|
||||||
|
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
|
||||||
|
|
||||||
mammoth@^1.9.1:
|
mammoth@^1.9.1:
|
||||||
version "1.9.1"
|
version "1.9.1"
|
||||||
resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf"
|
resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf"
|
||||||
@@ -4971,6 +5043,25 @@ ts-interface-checker@^0.1.9:
|
|||||||
resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
|
resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
|
||||||
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
|
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
|
||||||
|
|
||||||
|
ts-node@^10.9.2:
|
||||||
|
version "10.9.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f"
|
||||||
|
integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==
|
||||||
|
dependencies:
|
||||||
|
"@cspotcode/source-map-support" "^0.8.0"
|
||||||
|
"@tsconfig/node10" "^1.0.7"
|
||||||
|
"@tsconfig/node12" "^1.0.7"
|
||||||
|
"@tsconfig/node14" "^1.0.0"
|
||||||
|
"@tsconfig/node16" "^1.0.2"
|
||||||
|
acorn "^8.4.1"
|
||||||
|
acorn-walk "^8.1.1"
|
||||||
|
arg "^4.1.0"
|
||||||
|
create-require "^1.1.0"
|
||||||
|
diff "^4.0.1"
|
||||||
|
make-error "^1.1.1"
|
||||||
|
v8-compile-cache-lib "^3.0.1"
|
||||||
|
yn "3.1.1"
|
||||||
|
|
||||||
tsconfig-paths@^3.15.0:
|
tsconfig-paths@^3.15.0:
|
||||||
version "3.15.0"
|
version "3.15.0"
|
||||||
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4"
|
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4"
|
||||||
@@ -5138,6 +5229,11 @@ uuid@^9.0.1:
|
|||||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
|
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
|
||||||
integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
|
integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
|
||||||
|
|
||||||
|
v8-compile-cache-lib@^3.0.1:
|
||||||
|
version "3.0.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
|
||||||
|
integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==
|
||||||
|
|
||||||
validate.io-array@^1.0.3, validate.io-array@^1.0.5:
|
validate.io-array@^1.0.3, validate.io-array@^1.0.5:
|
||||||
version "1.0.6"
|
version "1.0.6"
|
||||||
resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d"
|
resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d"
|
||||||
@@ -5355,6 +5451,11 @@ yet-another-react-lightbox@^3.17.2:
|
|||||||
resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c"
|
resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c"
|
||||||
integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ==
|
integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ==
|
||||||
|
|
||||||
|
yn@3.1.1:
|
||||||
|
version "3.1.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
|
||||||
|
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==
|
||||||
|
|
||||||
yocto-queue@^0.1.0:
|
yocto-queue@^0.1.0:
|
||||||
version "0.1.0"
|
version "0.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
|
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
|
||||||
|
|||||||
Reference in New Issue
Block a user