Implement provider formatting improvements and fix client-side compatibility

- Add PROVIDER_INFO metadata to each provider file with proper display names
- Create centralized PROVIDER_METADATA in index.ts for consistent reference
- Update settings UI to use provider metadata for display names
- Fix client/server compatibility for Node.js modules in config.ts
Author: haddadrm
Date: 2025-04-01 19:26:15 +04:00
Parent: aa240009ab
Commit: 7e1dc33a08
11 changed files with 97 additions and 26 deletions
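
The first bullet describes adding a PROVIDER_INFO export to every provider file. The diff below only shows how index.ts consumes those exports, so the following is a minimal sketch of what such an export might look like inside a provider module; the file name and field values are assumptions, with the { key, displayName } shape mirrored from the custom_openai entry in the diff:

// Hypothetical sketch for a provider module such as openai.ts; the actual
// commit's per-provider exports may differ.
export const PROVIDER_INFO = {
  key: 'openai',
  displayName: 'OpenAI',
};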

@@ -1,19 +1,34 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
+import { loadOpenAIChatModels, loadOpenAIEmbeddingModels, PROVIDER_INFO as OpenAIInfo, PROVIDER_INFO } from './openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
 import { ChatOpenAI } from '@langchain/openai';
-import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
-import { loadGroqChatModels } from './groq';
-import { loadAnthropicChatModels } from './anthropic';
-import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
-import { loadTransformersEmbeddingsModels } from './transformers';
-import { loadDeepseekChatModels } from './deepseek';
-import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels } from './lmstudio';
+import { loadOllamaChatModels, loadOllamaEmbeddingModels, PROVIDER_INFO as OllamaInfo } from './ollama';
+import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
+import { loadAnthropicChatModels, PROVIDER_INFO as AnthropicInfo } from './anthropic';
+import { loadGeminiChatModels, loadGeminiEmbeddingModels, PROVIDER_INFO as GeminiInfo } from './gemini';
+import { loadTransformersEmbeddingsModels, PROVIDER_INFO as TransformersInfo } from './transformers';
+import { loadDeepseekChatModels, PROVIDER_INFO as DeepseekInfo } from './deepseek';
+import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels, PROVIDER_INFO as LMStudioInfo } from './lmstudio';
 
+export const PROVIDER_METADATA = {
+  openai: OpenAIInfo,
+  ollama: OllamaInfo,
+  groq: GroqInfo,
+  anthropic: AnthropicInfo,
+  gemini: GeminiInfo,
+  transformers: TransformersInfo,
+  deepseek: DeepseekInfo,
+  lmstudio: LMStudioInfo,
+  custom_openai: {
+    key: 'custom_openai',
+    displayName: 'Custom OpenAI'
+  }
+};
+
 export interface ChatModel {
   displayName: string;
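
The settings UI bullet above says display names now come from this metadata. Below is a hedged sketch of how client code might resolve a provider's display name from PROVIDER_METADATA; the import path, the helper name, and the assumption that every PROVIDER_INFO has the { key, displayName } shape are illustrative rather than taken from the commit:

import { PROVIDER_METADATA } from '@/lib/providers'; // import path is an assumption

type ProviderInfo = { key: string; displayName: string };

// Hypothetical helper: look up a human-readable provider name, falling back
// to the raw key when no metadata is registered for it.
const getProviderDisplayName = (providerKey: string): string => {
  const info: ProviderInfo | undefined = (
    PROVIDER_METADATA as Record<string, ProviderInfo>
  )[providerKey];
  return info?.displayName ?? providerKey;
};

// e.g. getProviderDisplayName('custom_openai') === 'Custom OpenAI'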
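
The last bullet refers to a client/server compatibility fix for Node.js modules in config.ts, but that hunk is not shown above. A common pattern for this kind of fix is to touch Node-only modules such as fs and path only when running on the server; the sketch below illustrates that pattern under the assumption that config.ts reads a TOML file from the project root, and is not the commit's actual implementation:

import toml from '@iarna/toml';

// Hedged sketch of a server-only guard for a config loader.
const isServer = typeof window === 'undefined';

const loadConfig = (): Record<string, unknown> => {
  // In the browser there is no filesystem, so return an empty config
  // instead of touching Node-only modules.
  if (!isServer) return {};

  // Load fs/path lazily so bundlers do not ship them to the client.
  const fs = require('fs') as typeof import('fs');
  const path = require('path') as typeof import('path');

  return toml.parse(
    fs.readFileSync(path.join(process.cwd(), 'config.toml'), 'utf-8'),
  ) as Record<string, unknown>;
};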