Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-09-13 13:01:33 +00:00)
Merge pull request #866 from agungbesti/feat/add-openai-models
feat: add new OpenAI models with proper temperature parameter handling
@@ -120,7 +120,11 @@ export const getAvailableChatModelProviders = async () => {
           model: new ChatOpenAI({
             apiKey: customOpenAiApiKey,
             modelName: customOpenAiModelName,
-            temperature: 0.7,
+            ...((() => {
+              const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];
+              const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
+              return isTemperatureRestricted ? {} : { temperature: 0.7 };
+            })()),
             configuration: {
               baseURL: customOpenAiApiUrl,
             },
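For readers unfamiliar with the spread-over-IIFE idiom in the hunk above, here is a minimal standalone TypeScript sketch; the model name is a hypothetical example and not taken from the actual config:

// Sketch of the conditional-spread idiom: the IIFE returns either {} or
// { temperature: 0.7 }, so spreading its result adds the key only for
// models that accept a custom temperature.
const customOpenAiModelName = 'o3-mini'; // hypothetical example value

const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];

const options = {
  modelName: customOpenAiModelName,
  ...((() => {
    const isTemperatureRestricted = temperatureRestrictedModels.some((restrictedModel) =>
      customOpenAiModelName.includes(restrictedModel),
    );
    return isTemperatureRestricted ? {} : { temperature: 0.7 };
  })()),
};

console.log(options); // { modelName: 'o3-mini' } (no temperature key for a restricted model)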
@@ -26,6 +26,10 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni',
     key: 'gpt-4o',
   },
+  {
+    displayName: 'GPT-4o (2024-05-13)',
+    key: 'gpt-4o-2024-05-13',
+  },
   {
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
@@ -47,12 +51,28 @@ const openaiChatModels: Record<string, string>[] = [
     key: 'gpt-5-nano',
   },
   {
-    displayName: 'GPT 5 mini',
+    displayName: 'GPT 5',
+    key: 'gpt-5',
+  },
+  {
+    displayName: 'GPT 5 Mini',
     key: 'gpt-5-mini',
   },
   {
-    displayName: 'GPT 5',
-    key: 'gpt-5',
+    displayName: 'o1',
+    key: 'o1',
+  },
+  {
+    displayName: 'o3',
+    key: 'o3',
+  },
+  {
+    displayName: 'o3 Mini',
+    key: 'o3-mini',
+  },
+  {
+    displayName: 'o4 Mini',
+    key: 'o4-mini',
   },
 ];
 
@@ -76,13 +96,23 @@ export const loadOpenAIChatModels = async () => {
   const chatModels: Record<string, ChatModel> = {};
 
   openaiChatModels.forEach((model) => {
+    // Models that only support temperature = 1
+    const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];
+    const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
+
+    const modelConfig: any = {
+      apiKey: openaiApiKey,
+      modelName: model.key,
+    };
+
+    // Only add temperature if the model supports it
+    if (!isTemperatureRestricted) {
+      modelConfig.temperature = 0.7;
+    }
+
     chatModels[model.key] = {
       displayName: model.displayName,
-      model: new ChatOpenAI({
-        apiKey: openaiApiKey,
-        modelName: model.key,
-        temperature: model.key.includes('gpt-5') ? 1 : 0.7,
-      }) as unknown as BaseChatModel,
+      model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
     };
   });
 
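As a minimal sketch of what the updated loop produces, assuming a hypothetical helper name and a placeholder API key (neither appears in the actual file):

// Sketch only: builds the per-model config the way the loop above does,
// omitting `temperature` for models that reject custom values.
const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];

// buildModelConfig is a hypothetical helper used here for illustration.
const buildModelConfig = (apiKey: string, modelKey: string) => {
  const isTemperatureRestricted = temperatureRestrictedModels.some((m) => modelKey.includes(m));
  const modelConfig: Record<string, unknown> = { apiKey, modelName: modelKey };
  if (!isTemperatureRestricted) {
    modelConfig.temperature = 0.7; // only added when the model supports it
  }
  return modelConfig;
};

buildModelConfig('sk-placeholder', 'gpt-4o-mini'); // => { apiKey, modelName, temperature: 0.7 }
buildModelConfig('sk-placeholder', 'o3-mini');     // => { apiKey, modelName } (no temperature)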