Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-22 04:58:15 +00:00)

Compare commits: feat/confi ... 672fc3c3a8
16 commits
- 672fc3c3a8
- 67c2672f39
- 334326744c
- 042ce33cf4
- 22b9a48b26
- e024d46971
- af36f15f3b
- 3d2d056f64
- d9ebf611ff
- eef6ebb924
- 65975ba6fc
- 51629b2cca
- 7d71643f42
- 4564175822
- 9d52d01f31
- 5abd42d46d
package.json (25 changed lines)
```diff
@@ -13,18 +13,18 @@
   "dependencies": {
     "@headlessui/react": "^2.2.0",
     "@headlessui/tailwindcss": "^0.2.2",
     "@huggingface/transformers": "^3.7.5",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
-    "@langchain/anthropic": "^0.3.24",
-    "@langchain/community": "^0.3.49",
-    "@langchain/core": "^0.3.66",
-    "@langchain/google-genai": "^0.2.15",
-    "@langchain/groq": "^0.2.3",
-    "@langchain/ollama": "^0.2.3",
-    "@langchain/openai": "^0.6.2",
-    "@langchain/textsplitters": "^0.1.0",
+    "@langchain/anthropic": "^1.0.0",
+    "@langchain/community": "^1.0.0",
+    "@langchain/core": "^1.0.1",
+    "@langchain/google-genai": "^1.0.0",
+    "@langchain/groq": "^1.0.0",
+    "@langchain/ollama": "^1.0.0",
+    "@langchain/openai": "^1.0.0",
+    "@langchain/textsplitters": "^1.0.0",
     "@tailwindcss/typography": "^0.5.12",
     "@xenova/transformers": "^2.17.2",
     "axios": "^1.8.3",
     "better-sqlite3": "^11.9.1",
     "clsx": "^2.1.0",
@@ -33,7 +33,7 @@
     "framer-motion": "^12.23.24",
     "html-to-text": "^9.0.5",
     "jspdf": "^3.0.1",
-    "langchain": "^0.3.30",
+    "langchain": "^1.0.1",
     "lucide-react": "^0.363.0",
     "mammoth": "^1.9.1",
     "markdown-to-jsx": "^7.7.2",
@@ -54,7 +54,7 @@
     "@types/better-sqlite3": "^7.6.12",
     "@types/html-to-text": "^9.0.4",
     "@types/jspdf": "^2.0.0",
-    "@types/node": "^20",
+    "@types/node": "^24.8.1",
     "@types/pdf-parse": "^1.1.4",
     "@types/react": "^18",
     "@types/react-dom": "^18",
@@ -65,7 +65,6 @@
     "postcss": "^8",
     "prettier": "^3.2.5",
     "tailwindcss": "^3.3.0",
     "ts-node": "^10.9.2",
-    "typescript": "^5"
+    "typescript": "^5.9.3"
   }
 }
```
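The 0.x to 1.x LangChain bump also moves several exports to new entry points, which is why several hunks below are one-line import swaps. A minimal sketch of the recurring pattern (the `Document` import is the case this diff touches most often):

```ts
// Old entry point, removed throughout this diff:
// import { Document } from 'langchain/document';

// New entry point under LangChain 1.x:
import { Document } from '@langchain/core/documents';

// The Document shape itself is unchanged by the move:
const doc = new Document({
  pageContent: 'hello',
  metadata: { source: 'example' },
});
```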
```diff
@@ -97,7 +97,7 @@ const handleEmitterEvents = async (
   encoder: TextEncoder,
   chatId: string,
 ) => {
-  let recievedMessage = '';
+  let receivedMessage = '';
   const aiMessageId = crypto.randomBytes(7).toString('hex');
 
   stream.on('data', (data) => {
@@ -113,7 +113,7 @@ const handleEmitterEvents = async (
         ),
       );
 
-      recievedMessage += parsedData.data;
+      receivedMessage += parsedData.data;
     } else if (parsedData.type === 'sources') {
       writer.write(
         encoder.encode(
@@ -150,7 +150,7 @@ const handleEmitterEvents = async (
 
   db.insert(messagesSchema)
     .values({
-      content: recievedMessage,
+      content: receivedMessage,
       chatId: chatId,
       messageId: aiMessageId,
       role: 'assistant',
```
```diff
@@ -5,7 +5,7 @@ import crypto from 'crypto';
 import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
 import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
-import { Document } from 'langchain/document';
+import { Document } from '@langchain/core/documents';
 import ModelRegistry from '@/lib/models/registry';
 
 interface FileRes {
```
```diff
@@ -16,7 +16,7 @@ const Chat = () => {
   useEffect(() => {
     const updateDividerWidth = () => {
       if (dividerRef.current) {
-        setDividerWidth(dividerRef.current.scrollWidth);
+        setDividerWidth(dividerRef.current.offsetWidth);
       }
     };
 
@@ -31,13 +31,22 @@ const Chat = () => {
   useEffect(() => {
     const scroll = () => {
-      messageEnd.current?.scrollIntoView({ behavior: 'smooth' });
+      messageEnd.current?.scrollIntoView({ behavior: 'auto' });
     };
 
     if (chatTurns.length === 1) {
       document.title = `${chatTurns[0].content.substring(0, 30)} - Perplexica`;
     }
 
+    const messageEndBottom =
+      messageEnd.current?.getBoundingClientRect().bottom ?? 0;
+
+    const distanceFromMessageEnd = window.innerHeight - messageEndBottom;
+
+    if (distanceFromMessageEnd >= -100) {
+      scroll();
+    }
+
     if (chatTurns[chatTurns.length - 1]?.role === 'user') {
       scroll();
     }
```
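The new effect only auto-scrolls when the reader is already near the bottom of the thread. A self-contained sketch of the guard it adds, using the same arithmetic as the hunk above:

```ts
// getBoundingClientRect().bottom measures from the top of the viewport, so
// window.innerHeight - bottom is >= 0 while the end marker is on screen and
// negative once it scrolls below the fold; -100 allows ~100px of slack.
const isNearBottom = (marker: HTMLElement): boolean => {
  const markerBottom = marker.getBoundingClientRect().bottom;
  return window.innerHeight - markerBottom >= -100;
};
```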
```diff
@@ -5,8 +5,7 @@ import Focus from './MessageInputActions/Focus';
 import Optimization from './MessageInputActions/Optimization';
 import Attach from './MessageInputActions/Attach';
 import { useChat } from '@/lib/hooks/useChat';
 import AttachSmall from './MessageInputActions/AttachSmall';
-import ModelSelector from './MessageInputActions/ModelSelector';
+import ModelSelector from './MessageInputActions/ChatModelSelector';
 
 const EmptyChatMessageInput = () => {
   const { sendMessage } = useChat();
```
```diff
@@ -97,7 +97,7 @@ const ModelSelector = () => {
           leaveTo="opacity-0 translate-y-1"
         >
           <PopoverPanel className="absolute z-10 w-[230px] sm:w-[270px] md:w-[300px] -right-4">
-            <div className="bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
+            <div className="bg-light-primary dark:bg-dark-primary max-h-[300px] sm:max-w-none border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
               <div className="p-4 border-b border-light-200 dark:border-dark-200">
                 <div className="relative">
                   <Search
@@ -109,7 +109,7 @@ const ModelSelector = () => {
                   placeholder="Search models..."
                   value={searchQuery}
                   onChange={(e) => setSearchQuery(e.target.value)}
-                  className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg text-xs text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
+                  className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg placeholder:text-sm text-sm text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
                 />
               </div>
             </div>
```
src/components/Settings/Sections/Models/ModelSelect.tsx (new file, +80 lines)
```tsx
import Select from '@/components/ui/Select';
import { ConfigModelProvider } from '@/lib/config/types';
import { useState } from 'react';
import { toast } from 'sonner';

const ModelSelect = ({
  providers,
  type,
}: {
  providers: ConfigModelProvider[];
  type: 'chat' | 'embedding';
}) => {
  const [selectedModel, setSelectedModel] = useState<string>(
    `${providers[0]?.id}/${providers[0].embeddingModels[0]?.key}`,
  );
  const [loading, setLoading] = useState(false);

  const handleSave = async (newValue: string) => {
    setLoading(true);
    setSelectedModel(newValue);
    try {
      if (type === 'chat') {
        localStorage.setItem('chatModelProviderId', newValue.split('/')[0]);
        localStorage.setItem('chatModelKey', newValue.split('/')[1]);
      } else {
        localStorage.setItem(
          'embeddingModelProviderId',
          newValue.split('/')[0],
        );
        localStorage.setItem('embeddingModelKey', newValue.split('/')[1]);
      }
    } catch (error) {
      console.error('Error saving config:', error);
      toast.error('Failed to save configuration.');
    } finally {
      setLoading(false);
    }
  };

  return (
    <section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
      <div className="space-y-5">
        <div>
          <h4 className="text-base text-black dark:text-white">
            Select {type === 'chat' ? 'Chat Model' : 'Embedding Model'}
          </h4>
          <p className="text-xs text-black/50 dark:text-white/50">
            {type === 'chat'
              ? 'Select the model to use for chat responses'
              : 'Select the model to use for embeddings'}
          </p>
        </div>
        <Select
          value={selectedModel}
          onChange={(event) => handleSave(event.target.value)}
          options={
            type === 'chat'
              ? providers.flatMap((provider) =>
                  provider.chatModels.map((model) => ({
                    value: `${provider.id}/${model.key}`,
                    label: `${provider.name} - ${model.name}`,
                  })),
                )
              : providers.flatMap((provider) =>
                  provider.embeddingModels.map((model) => ({
                    value: `${provider.id}/${model.key}`,
                    label: `${provider.name} - ${model.name}`,
                  })),
                )
          }
          className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60 cursor-pointer capitalize pr-12"
          loading={loading}
          disabled={loading}
        />
      </div>
    </section>
  );
};

export default ModelSelect;
```
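Two notes on the component above: saving is purely client-side (`localStorage`), and the initial `selectedModel` is seeded from `embeddingModels[0]` even when `type` is `'chat'`, so a chat instance starts on an embedding key until the user picks a model. Reading the saved selection back might look like this (a sketch; only the `setItem` side appears in this diff):

```ts
// Hypothetical read-back of the keys ModelSelect writes; the consuming code
// is not part of this diff.
const chatProviderId = localStorage.getItem('chatModelProviderId');
const chatModelKey = localStorage.getItem('chatModelKey');
```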
```diff
@@ -6,6 +6,7 @@ import {
   UIConfigField,
 } from '@/lib/config/types';
 import ModelProvider from './ModelProvider';
+import ModelSelect from './ModelSelect';
 
 const Models = ({
   fields,
@@ -17,14 +18,21 @@ const Models = ({
   const [providers, setProviders] = useState<ConfigModelProvider[]>(values);
 
   return (
-    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
-      <div className="flex flex-row justify-between items-center">
+    <div className="flex-1 space-y-6 overflow-y-auto py-6">
+      <div className="flex flex-col px-6 gap-y-4">
+        <h3 className="text-sm text-black/70 dark:text-white/70">
+          Select models
+        </h3>
+        <ModelSelect providers={values} type="embedding" />
+      </div>
+      <div className="border-t border-light-200 dark:border-dark-200" />
+      <div className="flex flex-row justify-between items-center px-6 ">
         <p className="text-sm text-black/70 dark:text-white/70">
           Manage model provider
         </p>
         <AddProvider modelProviders={fields} setProviders={setProviders} />
       </div>
-      <div className="flex flex-col gap-y-4">
+      <div className="flex flex-col px-6 gap-y-4">
         {providers.map((provider) => (
           <ModelProvider
             key={`provider-${provider.id}`}
```
```diff
@@ -124,7 +124,7 @@ class ConfigManager {
     providerConfigSections.forEach((provider) => {
       const newProvider: ConfigModelProvider & { required?: string[] } = {
         id: crypto.randomUUID(),
-        name: `${provider.name} ${Math.floor(Math.random() * 1000)}`,
+        name: `${provider.name}`,
         type: provider.key,
         chatModels: [],
         embeddingModels: [],
```
```diff
@@ -1,6 +1,6 @@
 import { sql } from 'drizzle-orm';
 import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core';
-import { Document } from 'langchain/document';
+import { Document } from '@langchain/core/documents';
 
 export const messages = sqliteTable('messages', {
   id: integer('id').primaryKey(),
```
```diff
@@ -67,12 +67,8 @@ export class HuggingFaceTransformersEmbeddings
   }
 
   private async runEmbedding(texts: string[]) {
-    const { pipeline } = await import('@xenova/transformers');
-
-    const pipe = await (this.pipelinePromise ??= pipeline(
-      'feature-extraction',
-      this.model,
-    ));
+    const { pipeline } = await import('@huggingface/transformers');
+    const pipe = await pipeline('feature-extraction', this.model);
 
     return this.caller.call(async () => {
       const output = await pipe(texts, { pooling: 'mean', normalize: true });
```
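One behavioral difference in this rewrite: the old code cached the pipeline in `this.pipelinePromise`, while the new code builds a fresh pipeline on every `runEmbedding` call, so the model may be reloaded per invocation. If that proves costly, the memoization could be reintroduced on top of the new package; a minimal sketch, using a module-level cache rather than the old instance field:

```ts
// Hypothetical re-memoization over @huggingface/transformers: cache the
// pipeline promise so each model loads at most once per process.
const pipelineCache = new Map<string, Promise<any>>();

async function getFeatureExtractionPipe(model: string) {
  const { pipeline } = await import('@huggingface/transformers');
  if (!pipelineCache.has(model)) {
    pipelineCache.set(model, pipeline('feature-extraction', model));
  }
  return pipelineCache.get(model)!;
}
```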
src/lib/models/providers/aiml.ts (new file, +152 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface AimlConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your AI/ML API key',
    required: true,
    placeholder: 'AI/ML API Key',
    env: 'AIML_API_KEY',
    scope: 'server',
  },
];

class AimlProvider extends BaseModelProvider<AimlConfig> {
  constructor(id: string, name: string, config: AimlConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const res = await fetch('https://api.aimlapi.com/models', {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${this.config.apiKey}`,
        },
      });

      const data = await res.json();

      const chatModels: Model[] = data.data
        .filter((m: any) => m.type === 'chat-completion')
        .map((m: any) => {
          return {
            name: m.id,
            key: m.id,
          };
        });

      const embeddingModels: Model[] = data.data
        .filter((m: any) => m.type === 'embedding')
        .map((m: any) => {
          return {
            name: m.id,
            key: m.id,
          };
        });

      return {
        embedding: embeddingModels,
        chat: chatModels,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to AI/ML API. Please ensure your API key is correct and the service is available.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading AI/ML API Chat Model. Invalid Model Selected',
      );
    }

    return new ChatOpenAI({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
      configuration: {
        baseURL: 'https://api.aimlapi.com',
      },
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading AI/ML API Embedding Model. Invalid Model Selected.',
      );
    }

    return new OpenAIEmbeddings({
      apiKey: this.config.apiKey,
      model: key,
      configuration: {
        baseURL: 'https://api.aimlapi.com',
      },
    });
  }

  static parseAndValidate(raw: any): AimlConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'aiml',
      name: 'AI/ML API',
    };
  }
}

export default AimlProvider;
```
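The aiml.ts file above sets the template that the other new providers in this diff follow. The shape of the `BaseModelProvider` contract can be inferred from how the subclasses use it; baseProvider.ts itself is not included in this diff, so the sketch below is an assumption reconstructed from those call sites, not the real source:

```ts
// Inferred sketch only; field and method names come from the subclasses in
// this diff, not from the actual baseProvider.ts.
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import { ModelList } from '../types';

abstract class BaseModelProviderSketch<C> {
  constructor(
    protected id: string,
    protected name: string,
    protected config: C,
  ) {}

  abstract getDefaultModels(): Promise<ModelList>;
  abstract getModelList(): Promise<ModelList>;
  abstract loadChatModel(key: string): Promise<BaseChatModel>;
  abstract loadEmbeddingModel(key: string): Promise<Embeddings>;
}
```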
src/lib/models/providers/anthropic.ts (new file, +115 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatAnthropic } from '@langchain/anthropic';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface AnthropicConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Anthropic API key',
    required: true,
    placeholder: 'Anthropic API Key',
    env: 'ANTHROPIC_API_KEY',
    scope: 'server',
  },
];

class AnthropicProvider extends BaseModelProvider<AnthropicConfig> {
  constructor(id: string, name: string, config: AnthropicConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch('https://api.anthropic.com/v1/models?limit=999', {
      method: 'GET',
      headers: {
        'x-api-key': this.config.apiKey,
        'anthropic-version': '2023-06-01',
        'Content-type': 'application/json',
      },
    });

    if (!res.ok) {
      throw new Error(`Failed to fetch Anthropic models: ${res.statusText}`);
    }

    const data = (await res.json()).data;

    const models: Model[] = data.map((m: any) => {
      return {
        key: m.id,
        name: m.display_name,
      };
    });

    return {
      embedding: [],
      chat: models,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Anthropic Chat Model. Invalid Model Selected',
      );
    }

    return new ChatAnthropic({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    throw new Error('Anthropic provider does not support embedding models.');
  }

  static parseAndValidate(raw: any): AnthropicConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'anthropic',
      name: 'Anthropic',
    };
  }
}

export default AnthropicProvider;
```
src/lib/models/providers/deepseek.ts (new file, +107 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface DeepSeekConfig {
  apiKey: string;
}

const defaultChatModels: Model[] = [
  {
    name: 'Deepseek Chat / DeepSeek V3.2 Exp',
    key: 'deepseek-chat',
  },
  {
    name: 'Deepseek Reasoner / DeepSeek V3.2 Exp',
    key: 'deepseek-reasoner',
  },
];

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your DeepSeek API key',
    required: true,
    placeholder: 'DeepSeek API Key',
    env: 'DEEPSEEK_API_KEY',
    scope: 'server',
  },
];

class DeepSeekProvider extends BaseModelProvider<DeepSeekConfig> {
  constructor(id: string, name: string, config: DeepSeekConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    return {
      embedding: [],
      chat: defaultChatModels,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading DeepSeek Chat Model. Invalid Model Selected',
      );
    }

    return new ChatOpenAI({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
      configuration: {
        baseURL: 'https://api.deepseek.com',
      },
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    throw new Error('DeepSeek provider does not support embedding models.');
  }

  static parseAndValidate(raw: any): DeepSeekConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'deepseek',
      name: 'Deepseek AI',
    };
  }
}

export default DeepSeekProvider;
```
src/lib/models/providers/gemini.ts (new file, +140 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import {
  ChatGoogleGenerativeAI,
  GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface GeminiConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Google Gemini API key',
    required: true,
    placeholder: 'Google Gemini API Key',
    env: 'GEMINI_API_KEY',
    scope: 'server',
  },
];

class GeminiProvider extends BaseModelProvider<GeminiConfig> {
  constructor(id: string, name: string, config: GeminiConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch(
      `https://generativelanguage.googleapis.com/v1beta/models?key=${this.config.apiKey}`,
      {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      },
    );

    const data = await res.json();

    let defaultEmbeddingModels: Model[] = [];
    let defaultChatModels: Model[] = [];

    data.models.forEach((m: any) => {
      if (m.supportedGenerationMethods.includes('embedText')) {
        defaultEmbeddingModels.push({
          key: m.name,
          name: m.displayName,
        });
      } else if (m.supportedGenerationMethods.includes('generateContent')) {
        defaultChatModels.push({
          key: m.name,
          name: m.displayName,
        });
      }
    });

    return {
      embedding: defaultEmbeddingModels,
      chat: defaultChatModels,
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Chat Model. Invalid Model Selected',
      );
    }

    return new ChatGoogleGenerativeAI({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Gemini Embedding Model. Invalid Model Selected.',
      );
    }

    return new GoogleGenerativeAIEmbeddings({
      apiKey: this.config.apiKey,
      model: key,
    });
  }

  static parseAndValidate(raw: any): GeminiConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'gemini',
      name: 'Google Gemini',
    };
  }
}

export default GeminiProvider;
```
src/lib/models/providers/groq.ts (new file, +118 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatGroq } from '@langchain/groq';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface GroqConfig {
  apiKey: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Groq API key',
    required: true,
    placeholder: 'Groq API Key',
    env: 'GROQ_API_KEY',
    scope: 'server',
  },
];

class GroqProvider extends BaseModelProvider<GroqConfig> {
  constructor(id: string, name: string, config: GroqConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const res = await fetch('https://api.groq.com/openai/v1/models', {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${this.config.apiKey}`,
        },
      });

      const data = await res.json();

      const models: Model[] = data.data.map((m: any) => {
        return {
          name: m.id,
          key: m.id,
        };
      });

      return {
        embedding: [],
        chat: models,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to Groq API. Please ensure your API key is correct and the Groq service is available.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error('Error Loading Groq Chat Model. Invalid Model Selected');
    }

    return new ChatGroq({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    throw new Error('Groq provider does not support embedding models.');
  }

  static parseAndValidate(raw: any): GroqConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'groq',
      name: 'Groq',
    };
  }
}

export default GroqProvider;
```
```diff
@@ -2,10 +2,26 @@ import { ModelProviderUISection } from '@/lib/config/types';
 import { ProviderConstructor } from './baseProvider';
 import OpenAIProvider from './openai';
 import OllamaProvider from './ollama';
 import TransformersProvider from './transformers';
+import AnthropicProvider from './anthropic';
+import GeminiProvider from './gemini';
+import GroqProvider from './groq';
+import DeepSeekProvider from './deepseek';
+import LMStudioProvider from './lmstudio';
+import LemonadeProvider from './lemonade';
+import AimlProvider from '@/lib/models/providers/aiml';
 
 export const providers: Record<string, ProviderConstructor<any>> = {
   openai: OpenAIProvider,
   ollama: OllamaProvider,
   transformers: TransformersProvider,
+  anthropic: AnthropicProvider,
+  gemini: GeminiProvider,
+  groq: GroqProvider,
+  deepseek: DeepSeekProvider,
+  aiml: AimlProvider,
+  lmstudio: LMStudioProvider,
+  lemonade: LemonadeProvider,
 };
 
 export const getModelProvidersUIConfigSection =
```
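With the registry in place, a provider is picked by its metadata key and constructed with a validated config. A hypothetical lookup sketch, assuming `ProviderConstructor` exposes the static `parseAndValidate` the classes in this diff define (the real instantiation path is handled elsewhere and is not part of this diff; the id and model key below are example values):

```ts
import { providers } from '@/lib/models/providers';

// Hypothetical: resolve a constructor from the registry, validate a raw
// config via the static helper, then load a chat model by key.
async function loadGroqChat(rawConfig: unknown, modelKey: string) {
  const GroqCtor = providers['groq'];
  const config = GroqCtor.parseAndValidate(rawConfig);
  const provider = new GroqCtor('example-id', 'Groq', config); // example id
  return provider.loadChatModel(modelKey);
}
```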
src/lib/models/providers/lemonade.ts (new file, +158 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface LemonadeConfig {
  baseURL: string;
  apiKey?: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for Lemonade API',
    required: true,
    placeholder: 'https://api.lemonade.ai/v1',
    env: 'LEMONADE_BASE_URL',
    scope: 'server',
  },
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Lemonade API key (optional)',
    required: false,
    placeholder: 'Lemonade API Key',
    env: 'LEMONADE_API_KEY',
    scope: 'server',
  },
];

class LemonadeProvider extends BaseModelProvider<LemonadeConfig> {
  constructor(id: string, name: string, config: LemonadeConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const headers: Record<string, string> = {
        'Content-Type': 'application/json',
      };

      if (this.config.apiKey) {
        headers['Authorization'] = `Bearer ${this.config.apiKey}`;
      }

      const res = await fetch(`${this.config.baseURL}/models`, {
        method: 'GET',
        headers,
      });

      const data = await res.json();

      const models: Model[] = data.data.map((m: any) => {
        return {
          name: m.id,
          key: m.id,
        };
      });

      return {
        embedding: models,
        chat: models,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to Lemonade API. Please ensure the base URL is correct and the service is available.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Lemonade Chat Model. Invalid Model Selected',
      );
    }

    return new ChatOpenAI({
      apiKey: this.config.apiKey || 'not-needed',
      temperature: 0.7,
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Lemonade Embedding Model. Invalid Model Selected.',
      );
    }

    return new OpenAIEmbeddings({
      apiKey: this.config.apiKey || 'not-needed',
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  static parseAndValidate(raw: any): LemonadeConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.baseURL)
      throw new Error('Invalid config provided. Base URL must be provided');

    return {
      baseURL: String(raw.baseURL),
      apiKey: raw.apiKey ? String(raw.apiKey) : undefined,
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'lemonade',
      name: 'Lemonade',
    };
  }
}

export default LemonadeProvider;
```
src/lib/models/providers/lmstudio.ts (new file, +148 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

interface LMStudioConfig {
  baseURL: string;
}

const providerConfigFields: UIConfigField[] = [
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for LM Studio server',
    required: true,
    placeholder: 'http://localhost:1234',
    env: 'LM_STUDIO_BASE_URL',
    scope: 'server',
  },
];

class LMStudioProvider extends BaseModelProvider<LMStudioConfig> {
  constructor(id: string, name: string, config: LMStudioConfig) {
    super(id, name, config);
  }

  private normalizeBaseURL(url: string): string {
    const trimmed = url.trim().replace(/\/+$/, '');
    return trimmed.endsWith('/v1') ? trimmed : `${trimmed}/v1`;
  }

  async getDefaultModels(): Promise<ModelList> {
    try {
      const baseURL = this.normalizeBaseURL(this.config.baseURL);

      const res = await fetch(`${baseURL}/models`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      });

      const data = await res.json();

      const models: Model[] = data.data.map((m: any) => {
        return {
          name: m.id,
          key: m.id,
        };
      });

      return {
        embedding: models,
        chat: models,
      };
    } catch (err) {
      if (err instanceof TypeError) {
        throw new Error(
          'Error connecting to LM Studio. Please ensure the base URL is correct and the LM Studio server is running.',
        );
      }

      throw err;
    }
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading LM Studio Chat Model. Invalid Model Selected',
      );
    }

    return new ChatOpenAI({
      apiKey: 'lm-studio',
      temperature: 0.7,
      model: key,
      streaming: true,
      configuration: {
        baseURL: this.normalizeBaseURL(this.config.baseURL),
      },
    });
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading LM Studio Embedding Model. Invalid Model Selected.',
      );
    }

    return new OpenAIEmbeddings({
      apiKey: 'lm-studio',
      model: key,
      configuration: {
        baseURL: this.normalizeBaseURL(this.config.baseURL),
      },
    });
  }

  static parseAndValidate(raw: any): LMStudioConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.baseURL)
      throw new Error('Invalid config provided. Base URL must be provided');

    return {
      baseURL: String(raw.baseURL),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'lmstudio',
      name: 'LM Studio',
    };
  }
}

export default LMStudioProvider;
```
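`normalizeBaseURL` lets users paste the LM Studio address with or without the `/v1` suffix or trailing slashes. Its behavior on a few inputs, following the implementation above:

```ts
// Behavior of normalizeBaseURL from the file above:
//   'http://localhost:1234'     -> 'http://localhost:1234/v1'
//   'http://localhost:1234/'    -> 'http://localhost:1234/v1'
//   'http://localhost:1234/v1/' -> 'http://localhost:1234/v1'
```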
src/lib/models/providers/transformers.ts (new file, +88 lines)
```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { HuggingFaceTransformersEmbeddings } from '@/lib/huggingfaceTransformer';

interface TransformersConfig {}

const defaultEmbeddingModels: Model[] = [
  {
    name: 'all-MiniLM-L6-v2',
    key: 'Xenova/all-MiniLM-L6-v2',
  },
  {
    name: 'mxbai-embed-large-v1',
    key: 'mixedbread-ai/mxbai-embed-large-v1',
  },
  {
    name: 'nomic-embed-text-v1',
    key: 'Xenova/nomic-embed-text-v1',
  },
];

const providerConfigFields: UIConfigField[] = [];

class TransformersProvider extends BaseModelProvider<TransformersConfig> {
  constructor(id: string, name: string, config: TransformersConfig) {
    super(id, name, config);
  }

  async getDefaultModels(): Promise<ModelList> {
    return {
      embedding: [...defaultEmbeddingModels],
      chat: [],
    };
  }

  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [],
    };
  }

  async loadChatModel(key: string): Promise<BaseChatModel> {
    throw new Error('Transformers Provider does not support chat models.');
  }

  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
      );
    }

    return new HuggingFaceTransformersEmbeddings({
      model: key,
    });
  }

  static parseAndValidate(raw: any): TransformersConfig {
    return {};
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'transformers',
      name: 'Transformers',
    };
  }
}

export default TransformersProvider;
```
```diff
@@ -16,7 +16,7 @@ import { StringOutputParser } from '@langchain/core/output_parsers';
 import LineListOutputParser from '../outputParsers/listLineOutputParser';
 import LineOutputParser from '../outputParsers/lineOutputParser';
 import { getDocumentsFromLinks } from '../utils/documents';
-import { Document } from 'langchain/document';
+import { Document } from '@langchain/core/documents';
 import { searchSearxng } from '../searxng';
 import path from 'node:path';
 import fs from 'node:fs';
```
```diff
@@ -39,10 +39,11 @@ export const searchSearxng = async (
     });
   }
 
-  const res = await axios.get(url.toString());
+  const res = await fetch(url);
+  const data = await res.json();
 
-  const results: SearxngSearchResult[] = res.data.results;
-  const suggestions: string[] = res.data.suggestions;
+  const results: SearxngSearchResult[] = data.results;
+  const suggestions: string[] = data.suggestions;
 
   return { results, suggestions };
 };
```
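One error-handling nuance in this swap: axios rejects on non-2xx statuses, while fetch resolves and only throws a TypeError on network failure, so an HTTP-level error now flows straight into `res.json()`. A defensive variant would check `res.ok`; a sketch only, not what the commit does:

```ts
// Sketch: guard the fetch with an explicit status check before parsing.
async function fetchJson(url: URL): Promise<any> {
  const res = await fetch(url);
  if (!res.ok) {
    throw new Error(`SearXNG request failed with status ${res.status}`);
  }
  return res.json();
}
```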
```diff
@@ -1,6 +1,6 @@
 import axios from 'axios';
 import { htmlToText } from 'html-to-text';
-import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
+import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
 import { Document } from '@langchain/core/documents';
 import pdfParse from 'pdf-parse';
 
```