From 7757bbd25382c5795dab53aced31311b7d72ece8 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Sat, 29 Nov 2025 12:57:29 +0530
Subject: [PATCH] feat(ollama-llm): explicitly disable think for reasoning
 models

---
 src/lib/models/providers/ollama/ollamaLLM.ts | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/src/lib/models/providers/ollama/ollamaLLM.ts b/src/lib/models/providers/ollama/ollamaLLM.ts
index 1089cb9..548818e 100644
--- a/src/lib/models/providers/ollama/ollamaLLM.ts
+++ b/src/lib/models/providers/ollama/ollamaLLM.ts
@@ -16,6 +16,14 @@ type OllamaConfig = {
   options?: GenerateOptions;
 };
 
+const reasoningModels = [
+  'gpt-oss',
+  'deepseek-r1',
+  'qwen3',
+  'deepseek-v3.1',
+  'magistral',
+];
+
 class OllamaLLM extends BaseLLM {
   ollamaClient: Ollama;
 
@@ -98,6 +106,9 @@ class OllamaLLM extends BaseLLM {
       model: this.config.model,
       messages: input.messages,
       format: z.toJSONSchema(input.schema),
+      ...(reasoningModels.find((m) => this.config.model.includes(m))
+        ? { think: false }
+        : {}),
       options: {
         top_p: this.config.options?.topP,
         temperature: 0.7,
@@ -125,6 +136,9 @@ class OllamaLLM extends BaseLLM {
       messages: input.messages,
       format: z.toJSONSchema(input.schema),
       stream: true,
+      ...(reasoningModels.find((m) => this.config.model.includes(m))
+        ? { think: false }
+        : {}),
      options: {
         top_p: this.config.options?.topP,
         temperature: 0.7,
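
Note: below is a minimal standalone sketch of the conditional-spread pattern the patch
applies, assuming the `ollama` npm client (whose `chat()` accepts a `think` flag on recent
versions). The `isReasoningModel` helper, the host URL, and the sample message are
illustrative and not part of the patch; the patch itself uses `.find`, which behaves the
same in this boolean position since `undefined` is falsy.

import { Ollama } from 'ollama';

// Known reasoning-capable model families, matched by substring so that
// tagged variants like 'deepseek-r1:7b' are also caught.
const reasoningModels = [
  'gpt-oss',
  'deepseek-r1',
  'qwen3',
  'deepseek-v3.1',
  'magistral',
];

// Hypothetical helper for illustration; `.some` expresses the boolean intent.
const isReasoningModel = (model: string): boolean =>
  reasoningModels.some((m) => model.includes(m));

async function chatWithThinkingDisabled(model: string) {
  const client = new Ollama({ host: 'http://localhost:11434' });
  return client.chat({
    model,
    messages: [{ role: 'user', content: 'Hello' }],
    // Conditional spread: the `think: false` key is only added for reasoning
    // models, so requests for other models are left untouched rather than
    // sending a `think` option they may not support.
    ...(isReasoningModel(model) ? { think: false } : {}),
  });
}

The spread-or-empty-object form keeps the request payload byte-identical for
non-reasoning models, which is why the patch uses it instead of always setting `think`.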