feat(providers): move providers to UI

ItzCrazyKns
2025-03-18 10:24:51 +05:30
parent e9e34ddff9
commit 6c227cab6f
2 changed files with 160 additions and 0 deletions

ui/lib/providers/index.ts (new file, 72 lines added)

@@ -0,0 +1,72 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
import { getCustomOpenaiApiKey, getCustomOpenaiApiUrl, getCustomOpenaiModelName } from '../config';
import { ChatOpenAI } from '@langchain/openai';
export interface ChatModelProvider {
  displayName: string;
  model: BaseChatModel;
}

export interface EmbeddingModelProvider {
  displayName: string;
  model: Embeddings;
}

const chatModelProviders: Record<string, () => Promise<Record<string, ChatModelProvider>>> = {
  openai: loadOpenAIChatModels,
};

const embeddingModelProviders: Record<string, () => Promise<Record<string, EmbeddingModelProvider>>> = {
  openai: loadOpenAIEmbeddingModels,
};
export const getAvailableChatModelProviders = async () => {
  const models: Record<string, Record<string, ChatModelProvider>> = {};

  for (const provider in chatModelProviders) {
    const providerModels = await chatModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  const customOpenAiApiKey = getCustomOpenaiApiKey();
  const customOpenAiApiUrl = getCustomOpenaiApiUrl();
  const customOpenAiModelName = getCustomOpenaiModelName();

  // Expose a user-configured OpenAI-compatible endpoint as its own
  // provider when all three custom settings are present.
  models['custom_openai'] = {
    ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
      ? {
          [customOpenAiModelName]: {
            displayName: customOpenAiModelName,
            model: new ChatOpenAI({
              openAIApiKey: customOpenAiApiKey,
              modelName: customOpenAiModelName,
              temperature: 0.7,
              configuration: {
                baseURL: customOpenAiApiUrl,
              },
            }),
          },
        }
      : {}),
  };

  return models;
};
export const getAvailableEmbeddingModelProviders = async () => {
  const models: Record<string, Record<string, EmbeddingModelProvider>> = {};

  for (const provider in embeddingModelProviders) {
    const providerModels = await embeddingModelProviders[provider]();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  return models;
};
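For context, a rough sketch of how a route handler might consume these loaders to list the available models to the UI. The route location, the @/lib/providers import alias, and the response shape are assumptions for illustration only; they are not part of this commit.

// Hypothetical consumer of the provider registry (path and shape assumed).
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';

export const GET = async () => {
  const [chatModelProviders, embeddingModelProviders] = await Promise.all([
    getAvailableChatModelProviders(),
    getAvailableEmbeddingModelProviders(),
  ]);

  // The model instances themselves are not serializable, so only the keys
  // and display names are sent to the client.
  const serialize = (providers: Record<string, Record<string, { displayName: string }>>) =>
    Object.fromEntries(
      Object.entries(providers).map(([provider, models]) => [
        provider,
        Object.entries(models).map(([key, m]) => ({ key, displayName: m.displayName })),
      ]),
    );

  return Response.json({
    chatModelProviders: serialize(chatModelProviders),
    embeddingModelProviders: serialize(embeddingModelProviders),
  });
};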

ui/lib/providers/openai.ts (new file, 88 lines added)

@@ -0,0 +1,88 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { getOpenaiApiKey } from '../config';
import { ChatModelProvider, EmbeddingModelProvider } from '.';

const openaiChatModels: Record<string, string>[] = [
  {
    displayName: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    displayName: 'GPT-4',
    key: 'gpt-4',
  },
  {
    displayName: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    displayName: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    displayName: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
];

const openaiEmbeddingModels: Record<string, string>[] = [
  {
    displayName: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    displayName: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];
export const loadOpenAIChatModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const chatModels: Record<string, ChatModelProvider> = {};

    openaiChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        model: new ChatOpenAI({
          openAIApiKey: openaiApiKey,
          modelName: model.key,
          temperature: 0.7,
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading OpenAI models: ${err}`);
    return {};
  }
};
export const loadOpenAIEmbeddingModels = async () => {
  const openaiApiKey = getOpenaiApiKey();

  if (!openaiApiKey) return {};

  try {
    const embeddingModels: Record<string, EmbeddingModelProvider> = {};

    openaiEmbeddingModels.forEach((model) => {
      embeddingModels[model.key] = {
        displayName: model.displayName,
        model: new OpenAIEmbeddings({
          openAIApiKey: openaiApiKey,
          modelName: model.key,
        }),
      };
    });

    return embeddingModels;
  } catch (err) {
    console.error(`Error loading OpenAI embedding models: ${err}`);
    return {};
  }
};
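Because index.ts keys each registry by a loader function with a fixed shape, wiring in another provider only needs a loader that returns the same Record. As a sketch, a Groq loader could reuse ChatOpenAI against Groq's OpenAI-compatible endpoint, mirroring the custom_openai case in index.ts; the getGroqApiKey() helper and the model list below are assumptions, not part of this commit.

// Sketch only: an extra provider following the same loader contract.
// getGroqApiKey() and the model list are hypothetical.
import { ChatOpenAI } from '@langchain/openai';
import { getGroqApiKey } from '../config';
import { ChatModelProvider } from '.';

const groqChatModels: Record<string, string>[] = [
  { displayName: 'Llama 3.1 8B Instant', key: 'llama-3.1-8b-instant' },
];

export const loadGroqChatModels = async () => {
  const groqApiKey = getGroqApiKey();

  if (!groqApiKey) return {};

  try {
    const chatModels: Record<string, ChatModelProvider> = {};

    groqChatModels.forEach((model) => {
      chatModels[model.key] = {
        displayName: model.displayName,
        // Groq speaks the OpenAI wire protocol, so ChatOpenAI is reused
        // with a different baseURL, just like the custom_openai provider.
        model: new ChatOpenAI({
          openAIApiKey: groqApiKey,
          modelName: model.key,
          temperature: 0.7,
          configuration: {
            baseURL: 'https://api.groq.com/openai/v1',
          },
        }),
      };
    });

    return chatModels;
  } catch (err) {
    console.error(`Error loading Groq models: ${err}`);
    return {};
  }
};

Registering such a loader would then be a one-line addition to the chatModelProviders map in index.ts (groq: loadGroqChatModels).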