This commit is contained in:
ItzCrazyKns
2025-09-25 18:48:27 +05:30
9 changed files with 236 additions and 12 deletions

View File

@@ -10,6 +10,8 @@ import {
getDeepseekApiKey,
getAimlApiKey,
getLMStudioApiEndpoint,
getLemonadeApiEndpoint,
getLemonadeApiKey,
updateConfig,
getOllamaApiKey,
} from '@/lib/config';
@@ -56,6 +58,8 @@ export const GET = async (req: Request) => {
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['ollamaApiKey'] = getOllamaApiKey();
config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
config['lemonadeApiUrl'] = getLemonadeApiEndpoint();
config['lemonadeApiKey'] = getLemonadeApiKey();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey();
@@ -106,6 +110,10 @@ export const POST = async (req: Request) => {
LM_STUDIO: {
API_URL: config.lmStudioApiUrl,
},
LEMONADE: {
API_URL: config.lemonadeApiUrl,
API_KEY: config.lemonadeApiKey,
},
CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl,
API_KEY: config.customOpenaiApiKey,

View File

@@ -23,6 +23,8 @@ interface SettingsType {
ollamaApiUrl: string;
ollamaApiKey: string;
lmStudioApiUrl: string;
lemonadeApiUrl: string;
lemonadeApiKey: string;
deepseekApiKey: string;
aimlApiKey: string;
customOpenaiApiKey: string;
@@ -953,6 +955,48 @@ const Page = () => {
</div>
</div>
</SettingsSection>
<SettingsSection title="Lemonade">
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Lemonade API URL
</p>
<Input
type="text"
placeholder="Lemonade API URL"
value={config.lemonadeApiUrl}
isSaving={savingStates['lemonadeApiUrl']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
lemonadeApiUrl: e.target.value,
}));
}}
onSave={(value) => saveConfig('lemonadeApiUrl', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Lemonade API Key (Optional)
</p>
<Input
type="password"
placeholder="Lemonade API Key"
value={config.lemonadeApiKey}
isSaving={savingStates['lemonadeApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
lemonadeApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('lemonadeApiKey', value)}
/>
</div>
</div>
</SettingsSection>
</div>
)
)}

View File

@@ -107,8 +107,8 @@ const AttachSmall = () => {
key={i}
className="flex flex-row items-center justify-start w-full space-x-3 p-3"
>
<div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
<File size={16} className="text-white/70" />
<div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
<File size={16} className="text-black/70 dark:text-white/70" />
</div>
<p className="text-black/70 dark:text-white/70 text-sm">
{file.fileName.length > 25

View File

@@ -42,6 +42,10 @@ interface Config {
LM_STUDIO: {
API_URL: string;
};
LEMONADE: {
API_URL: string;
API_KEY: string;
};
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
@@ -105,6 +109,11 @@ export const getCustomOpenaiModelName = () =>
export const getLMStudioApiEndpoint = () =>
loadConfig().MODELS.LM_STUDIO.API_URL;
export const getLemonadeApiEndpoint = () =>
loadConfig().MODELS.LEMONADE.API_URL;
export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY;
const mergeConfigs = (current: any, update: any): any => {
if (update === null || update === undefined) {
return current;

View File

@@ -45,6 +45,11 @@ import {
loadLMStudioEmbeddingsModels,
PROVIDER_INFO as LMStudioInfo,
} from './lmstudio';
import {
loadLemonadeChatModels,
loadLemonadeEmbeddingModels,
PROVIDER_INFO as LemonadeInfo,
} from './lemonade';
export const PROVIDER_METADATA = {
openai: OpenAIInfo,
@@ -56,6 +61,7 @@ export const PROVIDER_METADATA = {
deepseek: DeepseekInfo,
aimlapi: AimlApiInfo,
lmstudio: LMStudioInfo,
lemonade: LemonadeInfo,
custom_openai: {
key: 'custom_openai',
displayName: 'Custom OpenAI',
@@ -84,6 +90,7 @@ export const chatModelProviders: Record<
deepseek: loadDeepseekChatModels,
aimlapi: loadAimlApiChatModels,
lmstudio: loadLMStudioChatModels,
lemonade: loadLemonadeChatModels,
};
export const embeddingModelProviders: Record<
@@ -96,6 +103,7 @@ export const embeddingModelProviders: Record<
transformers: loadTransformersEmbeddingsModels,
aimlapi: loadAimlApiEmbeddingModels,
lmstudio: loadLMStudioEmbeddingsModels,
lemonade: loadLemonadeEmbeddingModels,
};
export const getAvailableChatModelProviders = async () => {
@@ -120,7 +128,11 @@ export const getAvailableChatModelProviders = async () => {
model: new ChatOpenAI({
apiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
temperature: 0.7,
...((() => {
const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => customOpenAiModelName.includes(restrictedModel));
return isTemperatureRestricted ? {} : { temperature: 0.7 };
})()),
configuration: {
baseURL: customOpenAiApiUrl,
},

View File

@@ -0,0 +1,94 @@
import axios from 'axios';
import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config';
import { ChatModel, EmbeddingModel } from '.';
// Registry metadata for this provider: `key` is the internal identifier used
// to look the provider up, `displayName` is what the settings UI shows.
export const PROVIDER_INFO = {
  key: 'lemonade',
  displayName: 'Lemonade',
};
import { ChatOpenAI } from '@langchain/openai';
import { OpenAIEmbeddings } from '@langchain/openai';
export const loadLemonadeChatModels = async () => {
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
const lemonadeApiKey = getLemonadeApiKey();
if (!lemonadeApiEndpoint) return {};
try {
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
headers: {
'Content-Type': 'application/json',
...(lemonadeApiKey
? { Authorization: `Bearer ${lemonadeApiKey}` }
: {}),
},
});
const { data: models } = res.data;
const chatModels: Record<string, ChatModel> = {};
models.forEach((model: any) => {
chatModels[model.id] = {
displayName: model.id,
model: new ChatOpenAI({
apiKey: lemonadeApiKey || 'lemonade-key',
modelName: model.id,
temperature: 0.7,
configuration: {
baseURL: `${lemonadeApiEndpoint}/api/v1`,
},
}),
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Lemonade models: ${err}`);
return {};
}
};
export const loadLemonadeEmbeddingModels = async () => {
const lemonadeApiEndpoint = getLemonadeApiEndpoint();
const lemonadeApiKey = getLemonadeApiKey();
if (!lemonadeApiEndpoint) return {};
try {
const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, {
headers: {
'Content-Type': 'application/json',
...(lemonadeApiKey
? { Authorization: `Bearer ${lemonadeApiKey}` }
: {}),
},
});
const { data: models } = res.data;
const embeddingModels: Record<string, EmbeddingModel> = {};
// Filter models that support embeddings (if Lemonade provides this info)
// For now, we'll assume all models can be used for embeddings
models.forEach((model: any) => {
embeddingModels[model.id] = {
displayName: model.id,
model: new OpenAIEmbeddings({
apiKey: lemonadeApiKey || 'lemonade-key',
modelName: model.id,
configuration: {
baseURL: `${lemonadeApiEndpoint}/api/v1`,
},
}),
};
});
return embeddingModels;
} catch (err) {
console.error(`Error loading Lemonade embedding models: ${err}`);
return {};
}
};

View File

@@ -26,6 +26,10 @@ const openaiChatModels: Record<string, string>[] = [
displayName: 'GPT-4 omni',
key: 'gpt-4o',
},
{
displayName: 'GPT-4o (2024-05-13)',
key: 'gpt-4o-2024-05-13',
},
{
displayName: 'GPT-4 omni mini',
key: 'gpt-4o-mini',
@@ -47,12 +51,28 @@ const openaiChatModels: Record<string, string>[] = [
key: 'gpt-5-nano',
},
{
displayName: 'GPT 5 mini',
displayName: 'GPT 5',
key: 'gpt-5',
},
{
displayName: 'GPT 5 Mini',
key: 'gpt-5-mini',
},
{
displayName: 'GPT 5',
key: 'gpt-5',
displayName: 'o1',
key: 'o1',
},
{
displayName: 'o3',
key: 'o3',
},
{
displayName: 'o3 Mini',
key: 'o3-mini',
},
{
displayName: 'o4 Mini',
key: 'o4-mini',
},
];
@@ -76,13 +96,23 @@ export const loadOpenAIChatModels = async () => {
const chatModels: Record<string, ChatModel> = {};
openaiChatModels.forEach((model) => {
// Models that only support temperature = 1
const temperatureRestrictedModels = ['gpt-5-nano','gpt-5','gpt-5-mini','o1', 'o3', 'o3-mini', 'o4-mini'];
const isTemperatureRestricted = temperatureRestrictedModels.some(restrictedModel => model.key.includes(restrictedModel));
const modelConfig: any = {
apiKey: openaiApiKey,
modelName: model.key,
};
// Only add temperature if the model supports it
if (!isTemperatureRestricted) {
modelConfig.temperature = 0.7;
}
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
apiKey: openaiApiKey,
modelName: model.key,
temperature: model.key.includes('gpt-5') ? 1 : 0.7,
}) as unknown as BaseChatModel,
model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
};
});