Compare commits


5 Commits

Author       SHA1        Message                                                             Date
Willie Zutz  18b6f5b674  Updated formatting                                                  2025-02-15 16:07:19 -07:00
Willie Zutz  2bdcbf20fb  User customizable context window for ollama models.                 2025-02-15 16:03:24 -07:00
ItzCrazyKns  115e6b2a71  Merge branch 'master' of https://github.com/ItzCrazyKns/Perplexica  2025-02-15 12:52:30 +05:30
ItzCrazyKns  a5c79c92ed  feat(settings): add embedding provider settings                     2025-02-15 12:52:27 +05:30
ItzCrazyKns  db3cea446e  Update UPDATING.md                                                  2025-02-15 12:33:43 +05:30
11 changed files with 297 additions and 24 deletions
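In summary: the compare introduces a user-configurable `ollamaContextWindow` setting that travels from the settings UI through localStorage and every request path (the REST routes and the WebSocket connection) down to the point where the backend resolves the chat model, and separately adds embedding model/provider selection to the settings page.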

View File

@@ -10,23 +10,25 @@ To update Perplexica to the latest version, follow these steps:
    git clone https://github.com/ItzCrazyKns/Perplexica.git
    ```
-2. Navigate to the Project Directory.
-3. Pull latest images from registry.
+2. Navigate to the project directory.
+3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+4. Pull the latest images from the registry.
    ```bash
    docker compose pull
    ```
-4. Update and Recreate containers.
+5. Update and recreate the containers.
    ```bash
    docker compose up -d
    ```
-5. Once the command completes running go to http://localhost:3000 and verify the latest changes.
+6. Once the command completes, go to http://localhost:3000 and verify the latest changes.
-## For non Docker users
+## For non-Docker users
 1. Clone the latest version of Perplexica from GitHub:
@@ -34,7 +36,14 @@ To update Perplexica to the latest version, follow these steps:
    git clone https://github.com/ItzCrazyKns/Perplexica.git
    ```
-2. Navigate to the Project Directory
-3. Execute `npm i` in both the `ui` folder and the root directory.
-4. Once packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
-5. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
+2. Navigate to the project directory.
+3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly.
+4. Execute `npm i` in both the `ui` folder and the root directory.
+5. Once the packages are updated, execute `npm run build` in both the `ui` folder and the root directory.
+6. Finally, start both the frontend and the backend by running `npm run start` in both the `ui` folder and the root directory.
+---

View File

@@ -5,6 +5,7 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import { ChatOpenAI } from '@langchain/openai';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
@@ -16,6 +17,7 @@ const router = express.Router();
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }

 interface ImageSearchBody {
@@ -61,6 +63,10 @@ router.post('/', async (req, res) => {
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
+
+      if (llm instanceof ChatOllama) {
+        llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+      }
     }

     if (!llm) {

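The guard-and-assign pattern above recurs nearly verbatim in the search, suggestions, and videos routes below. A minimal standalone sketch of what it does; the helper name and the constant are mine, the routes inline this logic:

```typescript
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

const DEFAULT_OLLAMA_CONTEXT = 2048;

// Only Ollama models expose numCtx (Ollama's num_ctx option), so the
// instanceof check keeps the assignment from touching OpenAI or other
// providers resolved from the same provider map.
function applyOllamaContextWindow(
  llm: BaseChatModel | undefined,
  requested?: number,
): void {
  if (llm instanceof ChatOllama) {
    // `||` (not `??`) means 0 or NaN also falls back to the default.
    llm.numCtx = requested || DEFAULT_OLLAMA_CONTEXT;
  }
}
```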
View File

@@ -15,12 +15,14 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';

 const router = express.Router();

 interface chatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
   customOpenAIKey?: string;
   customOpenAIBaseURL?: string;
 }
@@ -78,6 +80,7 @@ router.post('/', async (req, res) => {
     const embeddingModel =
       body.embeddingModel?.model ||
       Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
+    const ollamaContextWindow = body.chatModel?.ollamaContextWindow || 2048;

     let llm: BaseChatModel | undefined;
     let embeddings: Embeddings | undefined;
@@ -85,10 +88,12 @@ router.post('/', async (req, res) => {
     if (body.chatModel?.provider === 'custom_openai') {
       llm = new ChatOpenAI({
         modelName: body.chatModel?.model || getCustomOpenaiModelName(),
-        openAIApiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
+        openAIApiKey:
+          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
-          baseURL: body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
+          baseURL:
+            body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
         },
       }) as unknown as BaseChatModel;
     } else if (
@@ -97,6 +102,9 @@ router.post('/', async (req, res) => {
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
+      if (llm instanceof ChatOllama) {
+        llm.numCtx = ollamaContextWindow;
+      }
     }

     if (
if ( if (

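For reference, a request body that exercises the new field in this route might look as follows. The model name is illustrative; the `chatModel` keys match the extended interface from the diff:

```typescript
// Hypothetical POST body: ollamaContextWindow only has an effect when the
// resolved model is a ChatOllama instance; omitting it yields the 2048 default.
const requestBody = {
  chatModel: {
    provider: 'ollama',
    model: 'llama3.1', // illustrative model name
    ollamaContextWindow: 8192,
  },
  // ...the remaining fields of the route's body are unchanged by this compare
};
```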
View File

@@ -10,12 +10,14 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';

 const router = express.Router();

 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }

 interface SuggestionsBody {
@@ -60,6 +62,9 @@ router.post('/', async (req, res) => {
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
+      if (llm instanceof ChatOllama) {
+        llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+      }
     }

     if (!llm) {

View File

@@ -10,12 +10,14 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';

 const router = express.Router();

 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }

 interface VideoSearchBody {
@@ -61,6 +63,10 @@ router.post('/', async (req, res) => {
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
+
+      if (llm instanceof ChatOllama) {
+        llm.numCtx = body.chatModel?.ollamaContextWindow || 2048;
+      }
     }

     if (!llm) {

View File

@@ -14,6 +14,7 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';

 export const handleConnection = async (
   ws: WebSocket,
@@ -42,6 +43,8 @@ export const handleConnection = async (
       searchParams.get('embeddingModel') ||
       Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
+    const ollamaContextWindow = searchParams.get('ollamaContextWindow');
+
     let llm: BaseChatModel | undefined;
     let embeddings: Embeddings | undefined;
@@ -52,6 +55,9 @@ export const handleConnection = async (
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel]
         .model as unknown as BaseChatModel | undefined;
+      if (llm instanceof ChatOllama) {
+        llm.numCtx = ollamaContextWindow ? parseInt(ollamaContextWindow) : 2048;
+      }
     } else if (chatModelProvider == 'custom_openai') {
       const customOpenaiApiKey = getCustomOpenaiApiKey();
       const customOpenaiApiUrl = getCustomOpenaiApiUrl();

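Unlike the REST routes, the WebSocket handler receives the value as a query-string parameter, so it arrives as a string and is parsed with a 2048 fallback. A sketch of the client side of that handshake; the host, port, and model name are illustrative, while the parameter names match the diff above:

```typescript
// Assumed connection URL shape; query-string values are always strings,
// which is why the server side calls parseInt.
const params = new URLSearchParams({
  chatModelProvider: 'ollama',
  chatModel: 'llama3.1', // illustrative
  ollamaContextWindow: '8192',
});
const ws = new WebSocket(`ws://localhost:3001/?${params.toString()}`);
```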
View File

@@ -23,6 +23,7 @@ interface SettingsType {
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
+  ollamaContextWindow: number;
 }

 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -112,6 +113,11 @@ const Page = () => {
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
+  const [contextWindowSize, setContextWindowSize] = useState(2048);
+  const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
+  const predefinedContextSizes = [
+    1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
+  ];

   useEffect(() => {
     const fetchConfig = async () => {
@@ -123,6 +129,7 @@ const Page = () => {
       });

       const data = (await res.json()) as SettingsType;
       setConfig(data);
+
       const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
@@ -171,6 +178,13 @@ const Page = () => {
       setAutomaticVideoSearch(
         localStorage.getItem('autoVideoSearch') === 'true',
       );
+      const storedContextWindow = parseInt(
+        localStorage.getItem('ollamaContextWindow') ?? '2048',
+      );
+      setContextWindowSize(storedContextWindow);
+      setIsCustomContextWindow(
+        !predefinedContextSizes.includes(storedContextWindow),
+      );

       setIsLoading(false);
     };
@@ -223,11 +237,11 @@ const Page = () => {
       setChatModels(data.chatModelProviders || {});
       setEmbeddingModels(data.embeddingModelProviders || {});

-      const currentProvider = selectedChatModelProvider;
-      const newProviders = Object.keys(data.chatModelProviders || {});
+      const currentChatProvider = selectedChatModelProvider;
+      const newChatProviders = Object.keys(data.chatModelProviders || {});

-      if (!currentProvider && newProviders.length > 0) {
-        const firstProvider = newProviders[0];
+      if (!currentChatProvider && newChatProviders.length > 0) {
+        const firstProvider = newChatProviders[0];
         const firstModel = data.chatModelProviders[firstProvider]?.[0]?.name;

         if (firstModel) {
@@ -237,11 +251,11 @@ const Page = () => {
           localStorage.setItem('chatModel', firstModel);
         }
       } else if (
-        currentProvider &&
+        currentChatProvider &&
         (!data.chatModelProviders ||
-          !data.chatModelProviders[currentProvider] ||
-          !Array.isArray(data.chatModelProviders[currentProvider]) ||
-          data.chatModelProviders[currentProvider].length === 0)
+          !data.chatModelProviders[currentChatProvider] ||
+          !Array.isArray(data.chatModelProviders[currentChatProvider]) ||
+          data.chatModelProviders[currentChatProvider].length === 0)
       ) {
         const firstValidProvider = Object.entries(
           data.chatModelProviders || {},
@@ -267,6 +281,55 @@ const Page = () => {
         }
       }

+      const currentEmbeddingProvider = selectedEmbeddingModelProvider;
+      const newEmbeddingProviders = Object.keys(
+        data.embeddingModelProviders || {},
+      );
+
+      if (!currentEmbeddingProvider && newEmbeddingProviders.length > 0) {
+        const firstProvider = newEmbeddingProviders[0];
+        const firstModel =
+          data.embeddingModelProviders[firstProvider]?.[0]?.name;
+
+        if (firstModel) {
+          setSelectedEmbeddingModelProvider(firstProvider);
+          setSelectedEmbeddingModel(firstModel);
+          localStorage.setItem('embeddingModelProvider', firstProvider);
+          localStorage.setItem('embeddingModel', firstModel);
+        }
+      } else if (
+        currentEmbeddingProvider &&
+        (!data.embeddingModelProviders ||
+          !data.embeddingModelProviders[currentEmbeddingProvider] ||
+          !Array.isArray(
+            data.embeddingModelProviders[currentEmbeddingProvider],
+          ) ||
+          data.embeddingModelProviders[currentEmbeddingProvider].length === 0)
+      ) {
+        const firstValidProvider = Object.entries(
+          data.embeddingModelProviders || {},
+        ).find(
+          ([_, models]) => Array.isArray(models) && models.length > 0,
+        )?.[0];
+
+        if (firstValidProvider) {
+          setSelectedEmbeddingModelProvider(firstValidProvider);
+          setSelectedEmbeddingModel(
+            data.embeddingModelProviders[firstValidProvider][0].name,
+          );
+          localStorage.setItem('embeddingModelProvider', firstValidProvider);
+          localStorage.setItem(
+            'embeddingModel',
+            data.embeddingModelProviders[firstValidProvider][0].name,
+          );
+        } else {
+          setSelectedEmbeddingModelProvider(null);
+          setSelectedEmbeddingModel(null);
+          localStorage.removeItem('embeddingModelProvider');
+          localStorage.removeItem('embeddingModel');
+        }
+      }
+
       setConfig(data);
     }
@@ -278,6 +341,12 @@ const Page = () => {
         localStorage.setItem('chatModelProvider', value);
       } else if (key === 'chatModel') {
         localStorage.setItem('chatModel', value);
+      } else if (key === 'embeddingModelProvider') {
+        localStorage.setItem('embeddingModelProvider', value);
+      } else if (key === 'embeddingModel') {
+        localStorage.setItem('embeddingModel', value);
+      } else if (key === 'ollamaContextWindow') {
+        localStorage.setItem('ollamaContextWindow', value.toString());
       }
     } catch (err) {
       console.error('Failed to save:', err);
@@ -436,7 +505,6 @@ const Page = () => {
                       const value = e.target.value;
                       setSelectedChatModelProvider(value);
                       saveConfig('chatModelProvider', value);
-                      // Auto-select first model of new provider
                       const firstModel =
                         config.chatModelProviders[value]?.[0]?.name;
                       if (firstModel) {
@@ -496,6 +564,78 @@
                         ];
                       })()}
                     />
+                    {selectedChatModelProvider === 'ollama' && (
+                      <div className="flex flex-col space-y-1">
+                        <p className="text-black/70 dark:text-white/70 text-sm">
+                          Chat Context Window Size
+                        </p>
+                        <Select
+                          value={
+                            isCustomContextWindow
+                              ? 'custom'
+                              : contextWindowSize.toString()
+                          }
+                          onChange={(e) => {
+                            const value = e.target.value;
+                            if (value === 'custom') {
+                              setIsCustomContextWindow(true);
+                            } else {
+                              setIsCustomContextWindow(false);
+                              const numValue = parseInt(value);
+                              setContextWindowSize(numValue);
+                              setConfig((prev) => ({
+                                ...prev!,
+                                ollamaContextWindow: numValue,
+                              }));
+                              saveConfig('ollamaContextWindow', numValue);
+                            }
+                          }}
+                          options={[
+                            ...predefinedContextSizes.map((size) => ({
+                              value: size.toString(),
+                              label: `${size.toLocaleString()} tokens`,
+                            })),
+                            { value: 'custom', label: 'Custom...' },
+                          ]}
+                        />
+                        {isCustomContextWindow && (
+                          <div className="mt-2">
+                            <Input
+                              type="number"
+                              min={512}
+                              value={contextWindowSize}
+                              placeholder="Custom context window size (minimum 512)"
+                              isSaving={savingStates['ollamaContextWindow']}
+                              onChange={(e) => {
+                                // Allow any value to be typed
+                                const value =
+                                  parseInt(e.target.value) ||
+                                  contextWindowSize;
+                                setContextWindowSize(value);
+                              }}
+                              onSave={(value) => {
+                                // Validate only when saving
+                                const numValue = Math.max(
+                                  512,
+                                  parseInt(value) || 2048,
+                                );
+                                setContextWindowSize(numValue);
+                                setConfig((prev) => ({
+                                  ...prev!,
+                                  ollamaContextWindow: numValue,
+                                }));
+                                saveConfig('ollamaContextWindow', numValue);
+                              }}
+                            />
+                          </div>
+                        )}
+                        <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
+                          {isCustomContextWindow
+                            ? 'Adjust the context window size for Ollama models (minimum 512 tokens)'
+                            : 'Adjust the context window size for Ollama models'}
+                        </p>
+                      </div>
+                    )}
                   </div>
                 )}
               </div>
@@ -554,6 +694,81 @@
               </div>
             </div>
           )}
+          {config.embeddingModelProviders && (
+            <div className="flex flex-col space-y-4 mt-4 pt-4 border-t border-light-200 dark:border-dark-200">
+              <div className="flex flex-col space-y-1">
+                <p className="text-black/70 dark:text-white/70 text-sm">
+                  Embedding Model Provider
+                </p>
+                <Select
+                  value={selectedEmbeddingModelProvider ?? undefined}
+                  onChange={(e) => {
+                    const value = e.target.value;
+                    setSelectedEmbeddingModelProvider(value);
+                    saveConfig('embeddingModelProvider', value);
+                    const firstModel =
+                      config.embeddingModelProviders[value]?.[0]?.name;
+                    if (firstModel) {
+                      setSelectedEmbeddingModel(firstModel);
+                      saveConfig('embeddingModel', firstModel);
+                    }
+                  }}
+                  options={Object.keys(config.embeddingModelProviders).map(
+                    (provider) => ({
+                      value: provider,
+                      label:
+                        provider.charAt(0).toUpperCase() + provider.slice(1),
+                    }),
+                  )}
+                />
+              </div>
+              {selectedEmbeddingModelProvider && (
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Embedding Model
+                  </p>
+                  <Select
+                    value={selectedEmbeddingModel ?? undefined}
+                    onChange={(e) => {
+                      const value = e.target.value;
+                      setSelectedEmbeddingModel(value);
+                      saveConfig('embeddingModel', value);
+                    }}
+                    options={(() => {
+                      const embeddingModelProvider =
+                        config.embeddingModelProviders[
+                          selectedEmbeddingModelProvider
+                        ];
+                      return embeddingModelProvider
+                        ? embeddingModelProvider.length > 0
+                          ? embeddingModelProvider.map((model) => ({
+                              value: model.name,
+                              label: model.displayName,
+                            }))
+                          : [
+                              {
+                                value: '',
+                                label: 'No models available',
+                                disabled: true,
+                              },
+                            ]
+                        : [
+                            {
+                              value: '',
+                              label:
+                                'Invalid provider, please check backend logs',
+                              disabled: true,
+                            },
+                          ];
+                    })()}
+                  />
+                </div>
+              )}
+            </div>
+          )}
         </SettingsSection>
         <SettingsSection title="API Keys">

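Two small rules govern the new control: the stored size is compared against the preset list to decide whether to show the custom input, and custom values are validated only on save. Restated compactly below; the helper names are mine, the logic mirrors the diff:

```typescript
const PRESET_SIZES = [
  1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
];

// On load: a stored value outside the preset list flips the UI into
// custom mode, mirroring setIsCustomContextWindow above.
function loadContextWindow(): { size: number; isCustom: boolean } {
  const size = parseInt(
    localStorage.getItem('ollamaContextWindow') ?? '2048',
    10,
  );
  return { size, isCustom: !PRESET_SIZES.includes(size) };
}

// On save: non-numeric input falls back to 2048, then the 512-token
// minimum is enforced; typing itself is deliberately left unvalidated.
function clampOnSave(raw: string): number {
  return Math.max(512, parseInt(raw, 10) || 2048);
}
```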
View File

@@ -197,6 +197,11 @@ const useSocket = (
           'openAIBaseURL',
           localStorage.getItem('openAIBaseURL')!,
         );
+      } else {
+        searchParams.append(
+          'ollamaContextWindow',
+          localStorage.getItem('ollamaContextWindow') || '2048',
+        );
       }

       searchParams.append('embeddingModel', embeddingModel!);

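Note that `ollamaContextWindow` is appended in the `else` branch, i.e. for every provider other than `custom_openai`, not just for Ollama. This is harmless in practice: the backend's `instanceof ChatOllama` guard ignores the parameter for any other provider.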
View File

@@ -33,9 +33,10 @@ const SearchImages = ({
     const chatModelProvider = localStorage.getItem('chatModelProvider');
     const chatModel = localStorage.getItem('chatModel');
     const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
     const customOpenAIKey = localStorage.getItem('openAIApiKey');
-
+    const ollamaContextWindow =
+      localStorage.getItem('ollamaContextWindow') || '2048';
     const res = await fetch(
       `${process.env.NEXT_PUBLIC_API_URL}/images`,
@@ -54,6 +55,9 @@ const SearchImages = ({
               customOpenAIBaseURL: customOpenAIBaseURL,
               customOpenAIKey: customOpenAIKey,
             }),
+            ...(chatModelProvider === 'ollama' && {
+              ollamaContextWindow: parseInt(ollamaContextWindow),
+            }),
           },
         }),
       },

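The image, video, and suggestion requests all attach the field with the same conditional-spread idiom. A self-contained sketch of why it only adds the key for Ollama; variable names here are mine:

```typescript
// When the provider isn't 'ollama', the && short-circuits to false, and
// spreading false is a no-op, so the key is omitted from the payload
// entirely rather than being sent as undefined.
const provider = localStorage.getItem('chatModelProvider');
const chatModelPayload = {
  provider,
  model: localStorage.getItem('chatModel'),
  ...(provider === 'ollama' && {
    ollamaContextWindow: parseInt(
      localStorage.getItem('ollamaContextWindow') || '2048',
      10,
    ),
  }),
};
```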
View File

@@ -48,9 +48,10 @@ const Searchvideos = ({
     const chatModelProvider = localStorage.getItem('chatModelProvider');
     const chatModel = localStorage.getItem('chatModel');
     const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
     const customOpenAIKey = localStorage.getItem('openAIApiKey');
-
+    const ollamaContextWindow =
+      localStorage.getItem('ollamaContextWindow') || '2048';
     const res = await fetch(
       `${process.env.NEXT_PUBLIC_API_URL}/videos`,
@@ -69,6 +70,9 @@ const Searchvideos = ({
               customOpenAIBaseURL: customOpenAIBaseURL,
               customOpenAIKey: customOpenAIKey,
             }),
+            ...(chatModelProvider === 'ollama' && {
+              ollamaContextWindow: parseInt(ollamaContextWindow),
+            }),
           },
         }),
       },

View File

@@ -6,6 +6,8 @@ export const getSuggestions = async (chatHisory: Message[]) => {
   const customOpenAIKey = localStorage.getItem('openAIApiKey');
   const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+  const ollamaContextWindow =
+    localStorage.getItem('ollamaContextWindow') || '2048';

   const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
     method: 'POST',
@@ -21,6 +23,9 @@ export const getSuggestions = async (chatHisory: Message[]) => {
         customOpenAIKey,
         customOpenAIBaseURL,
       }),
+      ...(chatModelProvider === 'ollama' && {
+        ollamaContextWindow: parseInt(ollamaContextWindow),
+      }),
     },
   }),
 });