feat: Add LM Studio Support and Thinking Model Panel

LM Studio Integration:
- Added LM Studio provider with OpenAI-compatible API support
- Dynamic model discovery via the /v1/models endpoint (see the sketch after this list)
- Support for both chat and embeddings models
- Docker-compatible networking configuration
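
As a rough sketch of the discovery flow described above (not the committed implementation; the helper name and endpoint values are assumptions), the provider can ask a local LM Studio server which models it is currently serving:

// Hypothetical sketch of dynamic model discovery against LM Studio's
// OpenAI-compatible API. LM Studio listens on port 1234 by default;
// from inside Docker the host is typically reached via host.docker.internal.
interface LMStudioModel {
  id: string;
  object: string;
}

const listLMStudioModels = async (
  endpoint: string, // e.g. 'http://localhost:1234' or 'http://host.docker.internal:1234'
): Promise<LMStudioModel[]> => {
  const res = await fetch(`${endpoint}/v1/models`);
  if (!res.ok) {
    throw new Error(`LM Studio model discovery failed: ${res.status}`);
  }
  // OpenAI-compatible servers return { object: 'list', data: [...] }
  const body = (await res.json()) as { data: LMStudioModel[] };
  return body.data;
};

The same base URL can then back both chat and embeddings clients, since LM Studio exposes the OpenAI-compatible /v1/chat/completions and /v1/embeddings routes.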

Thinking Model Panel:
- Added collapsible UI panel for model's chain of thought
- Parses responses with <think> tags to separate reasoning from the final answer (see the sketch after this list)
- Maintains backward compatibility with regular responses
- Styled consistently with app theme for light/dark modes
- Preserves all existing message functionality (sources, markdown, etc.)
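
A minimal sketch of the tag handling (the helper name and return shape are illustrative, not the committed code):

// Hypothetical sketch: split a model response into reasoning and answer.
// Responses without <think> tags pass through untouched, which is what
// keeps regular, non-thinking models working as before.
const splitThinkingResponse = (content: string) => {
  const match = content.match(/<think>([\s\S]*?)<\/think>/);
  if (!match) {
    return { reasoning: null, answer: content };
  }
  return {
    reasoning: match[1].trim(),
    answer: content.replace(match[0], '').trim(),
  };
};

The UI can then render the reasoning inside the collapsible panel and pass the answer through the existing markdown and sources pipeline unchanged.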

These changes improve the app's compatibility with local LLMs and provide
better visibility into model reasoning while preserving existing functionality.
Author: haddadrm
Date:   2025-01-26 18:18:35 +04:00
Parent: 0737701de0
Commit: 6edac6938c

5 changed files with 191 additions and 18 deletions


@@ -16,9 +16,10 @@ interface Config {
     ANTHROPIC: string;
     GEMINI: string;
   };
-  API_ENDPOINTS: {
-    SEARXNG: string;
+  API_ENDPOINTS: {
+    OLLAMA: string;
+    LMSTUDIO: string;
+    SEARXNG: string;
   };
 }
@@ -51,6 +52,8 @@ export const getSearxngApiEndpoint = () =>
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
+export const getLMStudioApiEndpoint = () => loadConfig().API_ENDPOINTS.LMSTUDIO;
 
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
@@ -72,6 +75,27 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
     }
   }
 
+/*
+export const updateConfig = (config: RecursivePartial<Config>) => {
+  const currentConfig = loadConfig();
+
+  // Merge existing config with new values
+  const mergedConfig: RecursivePartial<Config> = {
+    GENERAL: {
+      ...currentConfig.GENERAL,
+      ...config.GENERAL,
+    },
+    API_KEYS: {
+      ...currentConfig.API_KEYS,
+      ...config.API_KEYS,
+    },
+    API_ENDPOINTS: {
+      ...currentConfig.API_ENDPOINTS,
+      ...config.API_ENDPOINTS,
+    },
+  };
+*/
+
   fs.writeFileSync(
     path.join(__dirname, `../${configFileName}`),
     toml.stringify(config),
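
Taken together, the hunks above let callers set and read the new endpoint through the existing config helpers. A hypothetical usage (the URL and port are examples, not values from the commit):

// Point the app at a local LM Studio server; LM Studio's default port is 1234.
// From inside a Docker container the host is usually reached as
// http://host.docker.internal:1234 rather than localhost.
updateConfig({
  API_ENDPOINTS: {
    LMSTUDIO: 'http://localhost:1234',
  },
});

console.log(getLMStudioApiEndpoint()); // -> 'http://localhost:1234'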