From 328b12ffbe1077d5ec7e5d5bbd85bc034926470d Mon Sep 17 00:00:00 2001
From: akubesti
Date: Thu, 11 Sep 2025 16:38:01 +0700
Subject: [PATCH] feat: add new OpenAI models with proper temperature
 parameter handling

- Add GPT 4.1 series and o1/o3/o4 models with temperature compatibility fixes
- Remove gpt-5/gpt-5-mini models due to organization verification restrictions
- Fix 400 errors for models that only support default temperature values
---
 src/lib/providers/index.ts  |  6 +++++-
 src/lib/providers/openai.ts | 40 ++++++++++++++++++++++++++++---------
 2 files changed, 36 insertions(+), 10 deletions(-)

diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index 1b6bb2f..3b3815f 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -120,7 +120,11 @@ export const getAvailableChatModelProviders = async () => {
         model: new ChatOpenAI({
           apiKey: customOpenAiApiKey,
           modelName: customOpenAiModelName,
-          temperature: 0.7,
+          ...((() => {
+            const temperatureRestrictedModels = ['gpt-5-nano', 'o1', 'o3-mini', 'o4-mini'];
+            const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
+            return isTemperatureRestricted ? {} : { temperature: 0.7 };
+          })()),
           configuration: {
             baseURL: customOpenAiApiUrl,
           },
diff --git a/src/lib/providers/openai.ts b/src/lib/providers/openai.ts
index 7e26763..c15b35b 100644
--- a/src/lib/providers/openai.ts
+++ b/src/lib/providers/openai.ts
@@ -26,6 +26,10 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni',
     key: 'gpt-4o',
   },
+  {
+    displayName: 'GPT-4o (2024-05-13)',
+    key: 'gpt-4o-2024-05-13',
+  },
   {
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
@@ -47,12 +51,20 @@ const openaiChatModels: Record<string, string>[] = [
     key: 'gpt-5-nano',
   },
   {
-    displayName: 'GPT 5 mini',
-    key: 'gpt-5-mini',
+    displayName: 'GPT 5 Chat Latest',
+    key: 'gpt-5-chat-latest',
   },
   {
-    displayName: 'GPT 5',
-    key: 'gpt-5',
+    displayName: 'o1',
+    key: 'o1',
+  },
+  {
+    displayName: 'o3 Mini',
+    key: 'o3-mini',
+  },
+  {
+    displayName: 'o4 Mini',
+    key: 'o4-mini',
   },
 ];

@@ -76,13 +88,23 @@ export const loadOpenAIChatModels = async () => {
   const chatModels: Record<string, ChatModel> = {};

   openaiChatModels.forEach((model) => {
+    // Models that only support temperature = 1
+    const temperatureRestrictedModels = ['gpt-5-nano', 'o1', 'o3-mini', 'o4-mini'];
+    const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
+
+    const modelConfig: any = {
+      apiKey: openaiApiKey,
+      modelName: model.key,
+    };
+
+    // Only add temperature if the model supports it
+    if (!isTemperatureRestricted) {
+      modelConfig.temperature = 0.7;
+    }
+
     chatModels[model.key] = {
       displayName: model.displayName,
-      model: new ChatOpenAI({
-        apiKey: openaiApiKey,
-        modelName: model.key,
-        temperature: model.key.includes('gpt-5') ? 1 : 0.7,
-      }) as unknown as BaseChatModel,
+      model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
     };
   });