diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index 1b6bb2f..00ba60f 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -120,7 +120,11 @@ export const getAvailableChatModelProviders = async () => {
             model: new ChatOpenAI({
               apiKey: customOpenAiApiKey,
               modelName: customOpenAiModelName,
-              temperature: 0.7,
+              ...((() => {
+                const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
+                const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
+                return isTemperatureRestricted ? {} : { temperature: 0.7 };
+              })()),
               configuration: {
                 baseURL: customOpenAiApiUrl,
               },
diff --git a/src/lib/providers/openai.ts b/src/lib/providers/openai.ts
index 7e26763..828f0d8 100644
--- a/src/lib/providers/openai.ts
+++ b/src/lib/providers/openai.ts
@@ -26,6 +26,10 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni',
     key: 'gpt-4o',
   },
+  {
+    displayName: 'GPT-4o (2024-05-13)',
+    key: 'gpt-4o-2024-05-13',
+  },
   {
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
@@ -47,12 +51,28 @@ const openaiChatModels: Record<string, string>[] = [
     key: 'gpt-5-nano',
   },
   {
-    displayName: 'GPT 5 mini',
+    displayName: 'GPT 5',
+    key: 'gpt-5',
+  },
+  {
+    displayName: 'GPT 5 Mini',
     key: 'gpt-5-mini',
   },
   {
-    displayName: 'GPT 5',
-    key: 'gpt-5',
+    displayName: 'o1',
+    key: 'o1',
+  },
+  {
+    displayName: 'o3',
+    key: 'o3',
+  },
+  {
+    displayName: 'o3 Mini',
+    key: 'o3-mini',
+  },
+  {
+    displayName: 'o4 Mini',
+    key: 'o4-mini',
   },
 ];
 
@@ -76,13 +96,23 @@ export const loadOpenAIChatModels = async () => {
     const chatModels: Record<string, ChatModel> = {};
 
     openaiChatModels.forEach((model) => {
+      // Models that only support temperature = 1
+      const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
+      const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
+
+      const modelConfig: any = {
+        apiKey: openaiApiKey,
+        modelName: model.key,
+      };
+
+      // Only add temperature if the model supports it
+      if (!isTemperatureRestricted) {
+        modelConfig.temperature = 0.7;
+      }
+
       chatModels[model.key] = {
         displayName: model.displayName,
-        model: new ChatOpenAI({
-          apiKey: openaiApiKey,
-          modelName: model.key,
-          temperature: model.key.includes('gpt-5') ? 1 : 0.7,
-        }) as unknown as BaseChatModel,
+        model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
       };
     });
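Note: both hunks hard-code the same temperatureRestrictedModels list, so the two call sites can drift apart as OpenAI's restrictions change. A minimal sketch of one way to share the check, assuming a new helper module (the file path, the temperatureOptions name, and its export are hypothetical and not part of this patch):

// src/lib/providers/temperature.ts (hypothetical file, not in this patch)
// Models that reject a custom temperature and only accept the API default.
// The list mirrors the one hard-coded in both hunks above.
const TEMPERATURE_RESTRICTED_MODELS = [
  'gpt-5-nano',
  'gpt-5',
  'gpt-5-mini',
  'o1',
  'o3',
  'o3-mini',
  'o4-mini',
];

// Returns `{ temperature: 0.7 }` for models that accept a custom temperature,
// or an empty object for restricted models, ready to spread into the
// ChatOpenAI constructor options.
export const temperatureOptions = (
  modelKey: string,
): { temperature?: number } =>
  TEMPERATURE_RESTRICTED_MODELS.some((restricted) =>
    modelKey.includes(restricted),
  )
    ? {}
    : { temperature: 0.7 };

// Usage sketch at either call site:
//   new ChatOpenAI({ apiKey, modelName: model.key, ...temperatureOptions(model.key) })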