feat(ollama-llm): remove explicit think parameter setting

Author: ItzCrazyKns
Date:   2025-11-27 11:10:04 +05:30
parent 7544bbafaf
commit f83bd06e89


@@ -98,7 +98,6 @@ class OllamaLLM extends BaseLLM<OllamaConfig> {
       model: this.config.model,
       messages: input.messages,
       format: z.toJSONSchema(input.schema),
-      think: false,
       options: {
         top_p: this.config.options?.topP,
         temperature: 0.7,
@@ -126,7 +125,6 @@ class OllamaLLM extends BaseLLM<OllamaConfig> {
       messages: input.messages,
       format: z.toJSONSchema(input.schema),
       stream: true,
-      think: false,
       options: {
         top_p: this.config.options?.topP,
         temperature: 0.7,
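
For context, here is a minimal sketch of what the resulting chat call looks like once `think` is no longer set, assuming the `ollama` npm client and zod v4's `z.toJSONSchema` (the host, model name, and schema below are hypothetical, not taken from the project):

```ts
// Minimal sketch, not the project's actual class: shows the request shape
// after `think: false` is removed, assuming the `ollama` npm client and zod v4.
import { Ollama } from 'ollama';
import * as z from 'zod';

const client = new Ollama({ host: 'http://localhost:11434' }); // hypothetical host
const schema = z.object({ answer: z.string() }); // hypothetical schema

async function main() {
  const res = await client.chat({
    model: 'llama3.1', // hypothetical model name
    messages: [{ role: 'user', content: 'Reply in JSON.' }],
    format: z.toJSONSchema(schema),
    // `think` is omitted entirely, so Ollama falls back to the model's
    // default thinking behavior instead of having it forced off.
    options: {
      temperature: 0.7,
    },
  });
  console.log(res.message.content);
}

main();
```

With the field omitted, the decision is left to Ollama itself, which presumably avoids forcing `think: false` onto models where the flag is unsupported.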