mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-04-30 08:12:26 +00:00)
Updated formatting
@@ -116,7 +116,7 @@ const Page = () => {
   const [contextWindowSize, setContextWindowSize] = useState(2048);
   const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
   const predefinedContextSizes = [
-    1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072
+    1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
   ];

   useEffect(() => {
@@ -178,9 +178,13 @@ const Page = () => {
       setAutomaticVideoSearch(
         localStorage.getItem('autoVideoSearch') === 'true',
       );
-      const storedContextWindow = parseInt(localStorage.getItem('ollamaContextWindow') ?? '2048');
+      const storedContextWindow = parseInt(
+        localStorage.getItem('ollamaContextWindow') ?? '2048',
+      );
       setContextWindowSize(storedContextWindow);
-      setIsCustomContextWindow(!predefinedContextSizes.includes(storedContextWindow));
+      setIsCustomContextWindow(
+        !predefinedContextSizes.includes(storedContextWindow),
+      );

       setIsLoading(false);
     };
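
The two wrapped calls above come from the settings page's config loader: it reads the stored Ollama context window from localStorage, falls back to 2048, and marks the value as custom whenever it is not one of the predefined sizes. A minimal standalone sketch of that logic, assuming the same localStorage key and defaults (the helper name is illustrative, not part of the codebase):

// Sketch only: mirrors the load logic in the hunk above, outside React state.
const predefinedContextSizes = [
  1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
];

const loadOllamaContextWindow = (): { size: number; isCustom: boolean } => {
  // localStorage.getItem returns string | null, so '2048' is the default.
  const size = parseInt(localStorage.getItem('ollamaContextWindow') ?? '2048');
  return { size, isCustom: !predefinedContextSizes.includes(size) };
};
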
@@ -566,7 +570,11 @@ const Page = () => {
               Chat Context Window Size
             </p>
             <Select
-              value={isCustomContextWindow ? 'custom' : contextWindowSize.toString()}
+              value={
+                isCustomContextWindow
+                  ? 'custom'
+                  : contextWindowSize.toString()
+              }
               onChange={(e) => {
                 const value = e.target.value;
                 if (value === 'custom') {
@@ -583,11 +591,11 @@ const Page = () => {
                 }
               }}
               options={[
-                ...predefinedContextSizes.map(size => ({
+                ...predefinedContextSizes.map((size) => ({
                   value: size.toString(),
-                  label: `${size.toLocaleString()} tokens`
+                  label: `${size.toLocaleString()} tokens`,
                 })),
-                { value: 'custom', label: 'Custom...' }
+                { value: 'custom', label: 'Custom...' },
               ]}
             />
             {isCustomContextWindow && (
@@ -600,12 +608,17 @@ const Page = () => {
                 isSaving={savingStates['ollamaContextWindow']}
                 onChange={(e) => {
                   // Allow any value to be typed
-                  const value = parseInt(e.target.value) || contextWindowSize;
+                  const value =
+                    parseInt(e.target.value) ||
+                    contextWindowSize;
                   setContextWindowSize(value);
                 }}
                 onSave={(value) => {
                   // Validate only when saving
-                  const numValue = Math.max(512, parseInt(value) || 2048);
+                  const numValue = Math.max(
+                    512,
+                    parseInt(value) || 2048,
+                  );
                   setContextWindowSize(numValue);
                   setConfig((prev) => ({
                     ...prev!,
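
The onChange handler above accepts whatever the user types, while onSave applies the actual validation: fall back to 2048 when the input does not parse, then enforce a 512-token floor. A hedged sketch of that save-time clamp in isolation (the function name is illustrative):

// Sketch only: the validation applied when a custom context window is saved.
const clampContextWindow = (value: string): number => {
  // parseInt('') and parseInt('abc') yield NaN, which is falsy, so 2048 is the
  // fallback; Math.max then enforces the 512-token minimum from the hint text.
  return Math.max(512, parseInt(value) || 2048);
};

// clampContextWindow('100')  -> 512
// clampContextWindow('')     -> 2048
// clampContextWindow('8192') -> 8192
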
@@ -617,9 +630,9 @@ const Page = () => {
               </div>
             )}
             <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
               {isCustomContextWindow
-                ? "Adjust the context window size for Ollama models (minimum 512 tokens)"
-                : "Adjust the context window size for Ollama models"}
+                ? 'Adjust the context window size for Ollama models (minimum 512 tokens)'
+                : 'Adjust the context window size for Ollama models'}
             </p>
           </div>
         )}
@@ -198,7 +198,10 @@ const useSocket = (
            localStorage.getItem('openAIBaseURL')!,
          );
        } else {
-         searchParams.append('ollamaContextWindow', localStorage.getItem('ollamaContextWindow') || '2048')
+         searchParams.append(
+           'ollamaContextWindow',
+           localStorage.getItem('ollamaContextWindow') || '2048',
+         );
        }

        searchParams.append('embeddingModel', embeddingModel!);
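
In useSocket the setting travels to the backend as a query parameter rather than through the config API, and only in the non-custom-OpenAI branch shown above. A minimal sketch of how that parameter ends up on the connection URL, assuming a NEXT_PUBLIC_WS_URL-style base (the URL construction itself is not part of this diff):

// Sketch only: attaching the stored context window to the WebSocket URL.
const searchParams = new URLSearchParams();
searchParams.append(
  'ollamaContextWindow',
  localStorage.getItem('ollamaContextWindow') || '2048',
);
// e.g. wss://host/?...&ollamaContextWindow=2048 (base URL is an assumption)
const wsURL = `${process.env.NEXT_PUBLIC_WS_URL}?${searchParams.toString()}`;
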
@@ -35,7 +35,8 @@ const SearchImages = ({
           const chatModel = localStorage.getItem('chatModel');
           const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
           const customOpenAIKey = localStorage.getItem('openAIApiKey');
-          const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
+          const ollamaContextWindow =
+            localStorage.getItem('ollamaContextWindow') || '2048';

           const res = await fetch(
             `${process.env.NEXT_PUBLIC_API_URL}/images`,
@@ -50,7 +50,8 @@ const Searchvideos = ({
           const chatModel = localStorage.getItem('chatModel');
           const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
           const customOpenAIKey = localStorage.getItem('openAIApiKey');
-          const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
+          const ollamaContextWindow =
+            localStorage.getItem('ollamaContextWindow') || '2048';

           const res = await fetch(
             `${process.env.NEXT_PUBLIC_API_URL}/videos`,
@@ -6,7 +6,8 @@ export const getSuggestions = async (chatHisory: Message[]) => {

   const customOpenAIKey = localStorage.getItem('openAIApiKey');
   const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
-  const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
+  const ollamaContextWindow =
+    localStorage.getItem('ollamaContextWindow') || '2048';

   const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
     method: 'POST',
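
SearchImages, Searchvideos, and getSuggestions all read the same 'ollamaContextWindow' key with the same '2048' fallback before issuing their requests. A small shared reader would remove the triplication; this is a hedged suggestion rather than something this commit introduces:

// Sketch only: one place to read the setting the three call sites duplicate.
const getOllamaContextWindow = (): string =>
  localStorage.getItem('ollamaContextWindow') || '2048';

// Usage at any of the call sites above:
const ollamaContextWindow = getOllamaContextWindow();
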