import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';

/** Server-side configuration required to talk to an OpenAI-compatible API. */
interface OpenAIConfig {
  apiKey: string;
  baseURL: string;
}

/* Chat models advertised out of the box. Only offered when the provider
   points at the official OpenAI endpoint (see getDefaultModels). */
const defaultChatModels: Model[] = [
  {
    name: 'GPT-3.5 Turbo',
    key: 'gpt-3.5-turbo',
  },
  {
    name: 'GPT-4',
    key: 'gpt-4',
  },
  {
    name: 'GPT-4 turbo',
    key: 'gpt-4-turbo',
  },
  {
    name: 'GPT-4 omni',
    key: 'gpt-4o',
  },
  {
    name: 'GPT-4o (2024-05-13)',
    key: 'gpt-4o-2024-05-13',
  },
  {
    name: 'GPT-4 omni mini',
    key: 'gpt-4o-mini',
  },
  {
    name: 'GPT 4.1 nano',
    key: 'gpt-4.1-nano',
  },
  {
    name: 'GPT 4.1 mini',
    key: 'gpt-4.1-mini',
  },
  {
    name: 'GPT 4.1',
    key: 'gpt-4.1',
  },
  {
    name: 'GPT 5 nano',
    key: 'gpt-5-nano',
  },
  {
    name: 'GPT 5',
    key: 'gpt-5',
  },
  {
    name: 'GPT 5 Mini',
    key: 'gpt-5-mini',
  },
  {
    name: 'o1',
    key: 'o1',
  },
  {
    name: 'o3',
    key: 'o3',
  },
  {
    name: 'o3 Mini',
    key: 'o3-mini',
  },
  {
    name: 'o4 Mini',
    key: 'o4-mini',
  },
];

/* Embedding models advertised out of the box (official endpoint only). */
const defaultEmbeddingModels: Model[] = [
  {
    name: 'Text Embedding 3 Small',
    key: 'text-embedding-3-small',
  },
  {
    name: 'Text Embedding 3 Large',
    key: 'text-embedding-3-large',
  },
];

/* UI/settings schema for this provider; values may also come from env vars. */
const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your OpenAI API key',
    required: true,
    placeholder: 'OpenAI API Key',
    env: 'OPENAI_API_KEY',
    scope: 'server',
  },
  {
    type: 'string',
    name: 'Base URL',
    key: 'baseURL',
    description: 'The base URL for the OpenAI API',
    required: true,
    placeholder: 'OpenAI Base URL',
    default: 'https://api.openai.com/v1',
    env: 'OPENAI_BASE_URL',
    scope: 'server',
  },
];

/**
 * Model provider backed by the OpenAI API, or any OpenAI-compatible endpoint
 * reachable through `config.baseURL`.
 *
 * Note: the generic argument on `BaseModelProvider` and the `Promise<...>`
 * type arguments below restore type parameters that were missing from the
 * annotations (bare `Promise` is a compile error under strict TS); the
 * correct arguments are pinned by the `ModelList`, `BaseChatModel`,
 * `Embeddings`, and `OpenAIConfig` imports/definitions in this file.
 */
class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
  constructor(id: string, name: string, config: OpenAIConfig) {
    super(id, name, config);
  }

  /**
   * Returns the built-in model catalogue.
   *
   * Defaults are only advertised when talking to the official OpenAI
   * endpoint — a third-party OpenAI-compatible server may not serve them.
   */
  async getDefaultModels(): Promise<ModelList> {
    if (this.config.baseURL === 'https://api.openai.com/v1') {
      return {
        embedding: defaultEmbeddingModels,
        chat: defaultChatModels,
      };
    }

    return {
      embedding: [],
      chat: [],
    };
  }

  /**
   * Full model list: built-in defaults plus any models the user configured
   * for this provider instance in the server registry.
   */
  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    // Non-null assertion: this provider instance was constructed from the
    // registry, so its id is expected to resolve there.
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  /**
   * Instantiates a LangChain chat model for `key`.
   * @throws Error if `key` is not in this provider's model list.
   */
  async loadChatModel(key: string): Promise<BaseChatModel> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading OpenAI Chat Model. Invalid Model Selected',
      );
    }

    // NOTE(review): some reasoning models (o1/o3/gpt-5 families) reject a
    // non-default `temperature`; confirm whether 0.7 should be applied
    // conditionally per model.
    return new ChatOpenAI({
      apiKey: this.config.apiKey,
      temperature: 0.7,
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  /**
   * Instantiates a LangChain embeddings model for `key`.
   * @throws Error if `key` is not in this provider's model list.
   */
  async loadEmbeddingModel(key: string): Promise<Embeddings> {
    const modelList = await this.getModelList();

    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
      );
    }

    return new OpenAIEmbeddings({
      apiKey: this.config.apiKey,
      model: key,
      configuration: {
        baseURL: this.config.baseURL,
      },
    });
  }

  /**
   * Validates raw (possibly user-supplied) config into an `OpenAIConfig`.
   * @throws Error when the input is not an object or lacks apiKey/baseURL.
   */
  static parseAndValidate(raw: any): OpenAIConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');

    if (!raw.apiKey || !raw.baseURL)
      throw new Error(
        'Invalid config provided. API key and base URL must be provided',
      );

    return {
      apiKey: String(raw.apiKey),
      baseURL: String(raw.baseURL),
    };
  }

  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'openai',
      name: 'OpenAI',
    };
  }
}

export default OpenAIProvider;