From 2bdcbf20fb9a08446ca9dc35741e107bdbcbe0c3 Mon Sep 17 00:00:00 2001
From: Willie Zutz
Date: Sat, 15 Feb 2025 16:03:24 -0700
Subject: [PATCH 1/5] Add user-customizable context window for Ollama models
---
src/routes/images.ts | 6 +++
src/routes/search.ts | 6 +++
src/routes/suggestions.ts | 5 ++
src/routes/videos.ts | 6 +++
src/websocket/connectionManager.ts | 6 +++
ui/app/settings/page.tsx | 75 ++++++++++++++++++++++++++++++
ui/components/ChatWindow.tsx | 2 +
ui/components/SearchImages.tsx | 5 +-
ui/components/SearchVideos.tsx | 5 +-
ui/lib/actions.ts | 4 ++
10 files changed, 118 insertions(+), 2 deletions(-)
diff --git a/src/routes/images.ts b/src/routes/images.ts
index 5671657..2e8e912 100644
--- a/src/routes/images.ts
+++ b/src/routes/images.ts
@@ -5,6 +5,7 @@ import { getAvailableChatModelProviders } from '../lib/providers';
import { HumanMessage, AIMessage } from '@langchain/core/messages';
import logger from '../utils/logger';
import { ChatOpenAI } from '@langchain/openai';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
@@ -16,6 +17,7 @@ const router = express.Router();
interface ChatModel {
provider: string;
model: string;
+ ollamaContextWindow?: number;
}
interface ImageSearchBody {
@@ -61,6 +63,10 @@ router.post('/', async (req, res) => {
) {
llm = chatModelProviders[chatModelProvider][chatModel]
.model as unknown as BaseChatModel | undefined;
+
+ if (llm instanceof ChatOllama) {
+ llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+ }
}
if (!llm) {
diff --git a/src/routes/search.ts b/src/routes/search.ts
index 57d90a3..daefece 100644
--- a/src/routes/search.ts
+++ b/src/routes/search.ts
@@ -15,12 +15,14 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
} from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
const router = express.Router();
interface chatModel {
provider: string;
model: string;
+ ollamaContextWindow?: number;
customOpenAIKey?: string;
customOpenAIBaseURL?: string;
}
@@ -78,6 +80,7 @@ router.post('/', async (req, res) => {
const embeddingModel =
body.embeddingModel?.model ||
Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
+ const ollamaContextWindow = body.chatModel?.ollamaContextWindow || 2048;
let llm: BaseChatModel | undefined;
let embeddings: Embeddings | undefined;
@@ -99,6 +102,9 @@ router.post('/', async (req, res) => {
) {
llm = chatModelProviders[chatModelProvider][chatModel]
.model as unknown as BaseChatModel | undefined;
+ if (llm instanceof ChatOllama) {
+ llm.numCtx = ollamaContextWindow;
+ }
}
if (
diff --git a/src/routes/suggestions.ts b/src/routes/suggestions.ts
index 7dd1739..c7a1409 100644
--- a/src/routes/suggestions.ts
+++ b/src/routes/suggestions.ts
@@ -10,12 +10,14 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
} from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
const router = express.Router();
interface ChatModel {
provider: string;
model: string;
+ ollamaContextWindow?: number;
}
interface SuggestionsBody {
@@ -60,6 +62,9 @@ router.post('/', async (req, res) => {
) {
llm = chatModelProviders[chatModelProvider][chatModel]
.model as unknown as BaseChatModel | undefined;
+ if (llm instanceof ChatOllama) {
+ llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+ }
}
if (!llm) {
diff --git a/src/routes/videos.ts b/src/routes/videos.ts
index b631f26..debe3cd 100644
--- a/src/routes/videos.ts
+++ b/src/routes/videos.ts
@@ -10,12 +10,14 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
} from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
const router = express.Router();
interface ChatModel {
provider: string;
model: string;
+ ollamaContextWindow?: number;
}
interface VideoSearchBody {
@@ -61,6 +63,10 @@ router.post('/', async (req, res) => {
) {
llm = chatModelProviders[chatModelProvider][chatModel]
.model as unknown as BaseChatModel | undefined;
+
+ if (llm instanceof ChatOllama) {
+ llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+ }
}
if (!llm) {
diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts
index bb8f242..979b8a0 100644
--- a/src/websocket/connectionManager.ts
+++ b/src/websocket/connectionManager.ts
@@ -14,6 +14,7 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
} from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
export const handleConnection = async (
ws: WebSocket,
@@ -42,6 +43,8 @@ export const handleConnection = async (
searchParams.get('embeddingModel') ||
Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
+ const ollamaContextWindow = searchParams.get('ollamaContextWindow');
+
let llm: BaseChatModel | undefined;
let embeddings: Embeddings | undefined;
@@ -52,6 +55,9 @@ export const handleConnection = async (
) {
llm = chatModelProviders[chatModelProvider][chatModel]
.model as unknown as BaseChatModel | undefined;
+ if (llm instanceof ChatOllama) {
+ llm.numCtx = ollamaContextWindow ? parseInt(ollamaContextWindow) : 2048;
+ }
} else if (chatModelProvider == 'custom_openai') {
const customOpenaiApiKey = getCustomOpenaiApiKey();
const customOpenaiApiUrl = getCustomOpenaiApiUrl();
diff --git a/ui/app/settings/page.tsx b/ui/app/settings/page.tsx
index 371d091..37a2f6b 100644
--- a/ui/app/settings/page.tsx
+++ b/ui/app/settings/page.tsx
@@ -23,6 +23,7 @@ interface SettingsType {
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
+ ollamaContextWindow: number;
}
interface InputProps extends React.InputHTMLAttributes {
@@ -112,6 +113,11 @@ const Page = () => {
const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
const [savingStates, setSavingStates] = useState>({});
+ const [contextWindowSize, setContextWindowSize] = useState(2048);
+ const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
+ const predefinedContextSizes = [
+ 1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072
+ ];
useEffect(() => {
const fetchConfig = async () => {
@@ -123,6 +129,7 @@ const Page = () => {
});
const data = (await res.json()) as SettingsType;
+
setConfig(data);
const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
@@ -171,6 +178,9 @@ const Page = () => {
setAutomaticVideoSearch(
localStorage.getItem('autoVideoSearch') === 'true',
);
+ const storedContextWindow = parseInt(localStorage.getItem('ollamaContextWindow') ?? '2048');
+ setContextWindowSize(storedContextWindow);
+ setIsCustomContextWindow(!predefinedContextSizes.includes(storedContextWindow));
setIsLoading(false);
};
@@ -331,6 +341,8 @@ const Page = () => {
localStorage.setItem('embeddingModelProvider', value);
} else if (key === 'embeddingModel') {
localStorage.setItem('embeddingModel', value);
+ } else if (key === 'ollamaContextWindow') {
+ localStorage.setItem('ollamaContextWindow', value.toString());
}
} catch (err) {
console.error('Failed to save:', err);
@@ -548,6 +560,69 @@ const Page = () => {
];
})()}
/>
+ {selectedChatModelProvider === 'ollama' && (
+
+
+ Chat Context Window Size
+
+
+ )}
)}
diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index 1940f42..366048f 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -197,6 +197,8 @@ const useSocket = (
'openAIBaseURL',
localStorage.getItem('openAIBaseURL')!,
);
+ } else {
+ searchParams.append('ollamaContextWindow', localStorage.getItem('ollamaContextWindow') || '2048')
}
searchParams.append('embeddingModel', embeddingModel!);
diff --git a/ui/components/SearchImages.tsx b/ui/components/SearchImages.tsx
index 383f780..21778b8 100644
--- a/ui/components/SearchImages.tsx
+++ b/ui/components/SearchImages.tsx
@@ -33,9 +33,9 @@ const SearchImages = ({
const chatModelProvider = localStorage.getItem('chatModelProvider');
const chatModel = localStorage.getItem('chatModel');
-
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
const customOpenAIKey = localStorage.getItem('openAIApiKey');
+ const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/images`,
@@ -54,6 +54,9 @@ const SearchImages = ({
customOpenAIBaseURL: customOpenAIBaseURL,
customOpenAIKey: customOpenAIKey,
}),
+ ...(chatModelProvider === 'ollama' && {
+ ollamaContextWindow: parseInt(ollamaContextWindow),
+ }),
},
}),
},
diff --git a/ui/components/SearchVideos.tsx b/ui/components/SearchVideos.tsx
index c284dc2..4a67d47 100644
--- a/ui/components/SearchVideos.tsx
+++ b/ui/components/SearchVideos.tsx
@@ -48,9 +48,9 @@ const Searchvideos = ({
const chatModelProvider = localStorage.getItem('chatModelProvider');
const chatModel = localStorage.getItem('chatModel');
-
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
const customOpenAIKey = localStorage.getItem('openAIApiKey');
+ const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/videos`,
@@ -69,6 +69,9 @@ const Searchvideos = ({
customOpenAIBaseURL: customOpenAIBaseURL,
customOpenAIKey: customOpenAIKey,
}),
+ ...(chatModelProvider === 'ollama' && {
+ ollamaContextWindow: parseInt(ollamaContextWindow),
+ }),
},
}),
},
diff --git a/ui/lib/actions.ts b/ui/lib/actions.ts
index a4409b0..4191733 100644
--- a/ui/lib/actions.ts
+++ b/ui/lib/actions.ts
@@ -6,6 +6,7 @@ export const getSuggestions = async (chatHisory: Message[]) => {
const customOpenAIKey = localStorage.getItem('openAIApiKey');
const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+ const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
method: 'POST',
@@ -21,6 +22,9 @@ export const getSuggestions = async (chatHisory: Message[]) => {
customOpenAIKey,
customOpenAIBaseURL,
}),
+ ...(chatModelProvider === 'ollama' && {
+ ollamaContextWindow: parseInt(ollamaContextWindow),
+ }),
},
}),
});
From 18b6f5b6749243a81338923823ce175be46e4758 Mon Sep 17 00:00:00 2001
From: Willie Zutz
Date: Sat, 15 Feb 2025 16:07:19 -0700
Subject: [PATCH 2/5] Update formatting
---
ui/app/settings/page.tsx | 37 +++++++++++++++++++++++-----------
ui/components/ChatWindow.tsx | 5 ++++-
ui/components/SearchImages.tsx | 3 ++-
ui/components/SearchVideos.tsx | 3 ++-
ui/lib/actions.ts | 3 ++-
5 files changed, 35 insertions(+), 16 deletions(-)
diff --git a/ui/app/settings/page.tsx b/ui/app/settings/page.tsx
index 37a2f6b..26cdbd6 100644
--- a/ui/app/settings/page.tsx
+++ b/ui/app/settings/page.tsx
@@ -116,7 +116,7 @@ const Page = () => {
const [contextWindowSize, setContextWindowSize] = useState(2048);
const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
const predefinedContextSizes = [
- 1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072
+ 1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
];
useEffect(() => {
@@ -178,9 +178,13 @@ const Page = () => {
setAutomaticVideoSearch(
localStorage.getItem('autoVideoSearch') === 'true',
);
- const storedContextWindow = parseInt(localStorage.getItem('ollamaContextWindow') ?? '2048');
+ const storedContextWindow = parseInt(
+ localStorage.getItem('ollamaContextWindow') ?? '2048',
+ );
setContextWindowSize(storedContextWindow);
- setIsCustomContextWindow(!predefinedContextSizes.includes(storedContextWindow));
+ setIsCustomContextWindow(
+ !predefinedContextSizes.includes(storedContextWindow),
+ );
setIsLoading(false);
};
@@ -566,7 +570,11 @@ const Page = () => {
Chat Context Window Size