Compare commits

...

7 Commits

Author               SHA1        Message                                                         Date
ItzCrazyKns          eef6ebb924  Update Section.tsx                                              2025-10-19 18:33:40 +05:30
ItzCrazyKns          65975ba6fc  feat(providers): add transformers provider                      2025-10-19 18:32:18 +05:30
ItzCrazyKns          51629b2cca  feat(chat): auto scroll, stop scrolling when scrolled back      2025-10-19 18:30:21 +05:30
ItzCrazyKns          7d71643f42  feat(app): rename model selector, fix UI                        2025-10-19 18:29:32 +05:30
ItzCrazyKns          4564175822  feat(settings): add embedding model selector                    2025-10-19 18:29:22 +05:30
Kushagra Srivastava  9d52d01f31  Merge pull request #901 from ItzCrazyKns/feat/config-management-model-registry  2025-10-19 13:58:20 +05:30
                                 Feat/config management model registry
ItzCrazyKns          5abd42d46d  feat(package): remove ts-node                                   2025-10-11 18:02:31 +05:30
10 changed files with 550 additions and 271 deletions

View File

@@ -13,6 +13,7 @@
   "dependencies": {
     "@headlessui/react": "^2.2.0",
     "@headlessui/tailwindcss": "^0.2.2",
+    "@huggingface/transformers": "^3.7.5",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
     "@langchain/anthropic": "^0.3.24",
@@ -24,7 +25,6 @@
     "@langchain/openai": "^0.6.2",
     "@langchain/textsplitters": "^0.1.0",
     "@tailwindcss/typography": "^0.5.12",
-    "@xenova/transformers": "^2.17.2",
     "axios": "^1.8.3",
     "better-sqlite3": "^11.9.1",
     "clsx": "^2.1.0",
@@ -65,7 +65,6 @@
     "postcss": "^8",
     "prettier": "^3.2.5",
     "tailwindcss": "^3.3.0",
-    "ts-node": "^10.9.2",
     "typescript": "^5"
   }
 }

View File

@@ -31,13 +31,22 @@ const Chat = () => {
   useEffect(() => {
     const scroll = () => {
-      messageEnd.current?.scrollIntoView({ behavior: 'smooth' });
+      messageEnd.current?.scrollIntoView({ behavior: 'auto' });
     };
 
     if (chatTurns.length === 1) {
       document.title = `${chatTurns[0].content.substring(0, 30)} - Perplexica`;
     }
 
+    const messageEndBottom =
+      messageEnd.current?.getBoundingClientRect().bottom ?? 0;
+    const distanceFromMessageEnd = window.innerHeight - messageEndBottom;
+
+    if (distanceFromMessageEnd >= -100) {
+      scroll();
+    }
+
     if (chatTurns[chatTurns.length - 1]?.role === 'user') {
       scroll();
     }
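The new guard can be read in isolation: the chat keeps auto-scrolling only while the end-of-messages sentinel sits within 100px below the viewport bottom, so a user who has scrolled back up is left alone. A minimal sketch of the same check as a standalone helper (the function name is illustrative, not part of the diff):

// True while the end-of-messages sentinel is at most `threshold` px
// below the viewport bottom, i.e. the user has not scrolled far back up.
const isNearBottom = (
  sentinel: HTMLElement | null,
  threshold = 100,
): boolean => {
  const bottom = sentinel?.getBoundingClientRect().bottom ?? 0;
  return window.innerHeight - bottom >= -threshold;
};

// Mirrors the effect body above:
// if (isNearBottom(messageEnd.current)) scroll();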

View File

@@ -5,8 +5,7 @@ import Focus from './MessageInputActions/Focus';
 import Optimization from './MessageInputActions/Optimization';
 import Attach from './MessageInputActions/Attach';
 import { useChat } from '@/lib/hooks/useChat';
-import AttachSmall from './MessageInputActions/AttachSmall';
-import ModelSelector from './MessageInputActions/ModelSelector';
+import ModelSelector from './MessageInputActions/ChatModelSelector';
 
 const EmptyChatMessageInput = () => {
   const { sendMessage } = useChat();

View File

@@ -97,7 +97,7 @@ const ModelSelector = () => {
             leaveTo="opacity-0 translate-y-1"
           >
             <PopoverPanel className="absolute z-10 w-[230px] sm:w-[270px] md:w-[300px] -right-4">
-              <div className="bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
+              <div className="bg-light-primary dark:bg-dark-primary max-h-[300px] sm:max-w-none border rounded-lg border-light-200 dark:border-dark-200 w-full flex flex-col shadow-lg overflow-hidden">
                 <div className="p-4 border-b border-light-200 dark:border-dark-200">
                   <div className="relative">
                     <Search
@@ -109,7 +109,7 @@ const ModelSelector = () => {
                     placeholder="Search models..."
                     value={searchQuery}
                     onChange={(e) => setSearchQuery(e.target.value)}
-                    className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg text-xs text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
+                    className="w-full pl-9 pr-3 py-2 bg-light-secondary dark:bg-dark-secondary rounded-lg placeholder:text-sm text-sm text-black dark:text-white placeholder:text-black/40 dark:placeholder:text-white/40 focus:outline-none focus:ring-2 focus:ring-sky-500/20 border border-transparent focus:border-sky-500/30 transition duration-200"
                   />
                 </div>
               </div>

View File

@@ -0,0 +1,80 @@
import Select from '@/components/ui/Select';
import { ConfigModelProvider } from '@/lib/config/types';
import { useState } from 'react';
import { toast } from 'sonner';
const ModelSelect = ({
providers,
type,
}: {
providers: ConfigModelProvider[];
type: 'chat' | 'embedding';
}) => {
const [selectedModel, setSelectedModel] = useState<string>(
`${providers[0]?.id}/${providers[0].embeddingModels[0]?.key}`,
);
const [loading, setLoading] = useState(false);
const handleSave = async (newValue: string) => {
setLoading(true);
setSelectedModel(newValue);
try {
if (type === 'chat') {
localStorage.setItem('chatModelProviderId', newValue.split('/')[0]);
localStorage.setItem('chatModelKey', newValue.split('/')[1]);
} else {
localStorage.setItem(
'embeddingModelProviderId',
newValue.split('/')[0],
);
localStorage.setItem('embeddingModelKey', newValue.split('/')[1]);
}
} catch (error) {
console.error('Error saving config:', error);
toast.error('Failed to save configuration.');
} finally {
setLoading(false);
}
};
return (
<section className="rounded-xl border border-light-200 bg-light-primary/80 p-6 transition-colors dark:border-dark-200 dark:bg-dark-primary/80">
<div className="space-y-5">
<div>
<h4 className="text-base text-black dark:text-white">
Select {type === 'chat' ? 'Chat Model' : 'Embedding Model'}
</h4>
<p className="text-xs text-black/50 dark:text-white/50">
{type === 'chat'
? 'Select the model to use for chat responses'
: 'Select the model to use for embeddings'}
</p>
</div>
<Select
value={selectedModel}
onChange={(event) => handleSave(event.target.value)}
options={
type === 'chat'
? providers.flatMap((provider) =>
provider.chatModels.map((model) => ({
value: `${provider.id}/${model.key}`,
label: `${provider.name} - ${model.name}`,
})),
)
: providers.flatMap((provider) =>
provider.embeddingModels.map((model) => ({
value: `${provider.id}/${model.key}`,
label: `${provider.name} - ${model.name}`,
})),
)
}
className="w-full rounded-lg border border-light-200 dark:border-dark-200 bg-light-primary dark:bg-dark-primary px-4 py-3 text-sm text-black/80 dark:text-white/80 placeholder:text-black/40 dark:placeholder:text-white/40 focus-visible:outline-none focus-visible:border-light-300 dark:focus-visible:border-dark-300 transition-colors disabled:cursor-not-allowed disabled:opacity-60 cursor-pointer capitalize pr-12"
loading={loading}
disabled={loading}
/>
</div>
</section>
);
};
export default ModelSelect;
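For orientation, `handleSave` persists the selection as a `providerId`/`modelKey` pair across four `localStorage` keys. A companion read helper would look roughly like this (a sketch assuming nothing else rewrites those keys; the helper name is hypothetical):

// Reads back the selection persisted by handleSave; null if unset.
const readSavedModel = (
  type: 'chat' | 'embedding',
): { providerId: string; modelKey: string } | null => {
  const prefix = type === 'chat' ? 'chatModel' : 'embeddingModel';
  const providerId = localStorage.getItem(`${prefix}ProviderId`);
  const modelKey = localStorage.getItem(`${prefix}Key`);
  return providerId && modelKey ? { providerId, modelKey } : null;
};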

View File

@@ -6,6 +6,7 @@ import {
   UIConfigField,
 } from '@/lib/config/types';
 import ModelProvider from './ModelProvider';
+import ModelSelect from './ModelSelect';
 
 const Models = ({
   fields,
@@ -17,14 +18,21 @@ const Models = ({
   const [providers, setProviders] = useState<ConfigModelProvider[]>(values);
 
   return (
-    <div className="flex-1 space-y-6 overflow-y-auto px-6 py-6">
-      <div className="flex flex-row justify-between items-center">
+    <div className="flex-1 space-y-6 overflow-y-auto py-6">
+      <div className="flex flex-col px-6 gap-y-4">
+        <h3 className="text-sm text-black/70 dark:text-white/70">
+          Select models
+        </h3>
+        <ModelSelect providers={values} type="embedding" />
+      </div>
+      <div className="border-t border-light-200 dark:border-dark-200" />
+      <div className="flex flex-row justify-between items-center px-6 ">
         <p className="text-sm text-black/70 dark:text-white/70">
           Manage model provider
         </p>
         <AddProvider modelProviders={fields} setProviders={setProviders} />
       </div>
-      <div className="flex flex-col gap-y-4">
+      <div className="flex flex-col px-6 gap-y-4">
         {providers.map((provider) => (
           <ModelProvider
             key={`provider-${provider.id}`}

View File

@@ -1,5 +1,6 @@
 import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
 import { chunkArray } from '@langchain/core/utils/chunk_array';
+import { pipeline } from '@huggingface/transformers';
 
 export interface HuggingFaceTransformersEmbeddingsParams
   extends EmbeddingsParams {
@@ -67,12 +68,7 @@
   }
 
   private async runEmbedding(texts: string[]) {
-    const { pipeline } = await import('@xenova/transformers');
-
-    const pipe = await (this.pipelinePromise ??= pipeline(
-      'feature-extraction',
-      this.model,
-    ));
+    const pipe = await pipeline('feature-extraction', this.model);
 
     return this.caller.call(async () => {
       const output = await pipe(texts, { pooling: 'mean', normalize: true });
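This change swaps the lazily memoized dynamic import of `@xenova/transformers` (note the dropped `pipelinePromise` cache) for a static import of its successor package, `@huggingface/transformers`; the pipeline call itself is unchanged. A minimal standalone sketch of the same embedding flow, using one of the default model keys added below (the `embed` helper is illustrative):

import { pipeline } from '@huggingface/transformers';

// Feature-extraction pipeline: one mean-pooled, L2-normalized
// vector per input text.
const embed = async (texts: string[]): Promise<number[][]> => {
  const pipe = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
  const output = await pipe(texts, { pooling: 'mean', normalize: true });
  return output.tolist();
};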

View File

@@ -2,10 +2,12 @@ import { ModelProviderUISection } from '@/lib/config/types';
 import { ProviderConstructor } from './baseProvider';
 import OpenAIProvider from './openai';
 import OllamaProvider from './ollama';
+import TransformersProvider from './transformers';
 
 export const providers: Record<string, ProviderConstructor<any>> = {
   openai: OpenAIProvider,
   ollama: OllamaProvider,
+  transformers: TransformersProvider,
 };
 
 export const getModelProvidersUIConfigSection =
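With the registry entry in place, a config-driven lookup can instantiate the new provider like any other. A rough sketch, assuming constructors are invoked as `(id, name, config)` as in the `TransformersProvider` class below (the helper itself is hypothetical, not part of the diff):

// Resolve a constructor from the registry and instantiate it.
const createProvider = (type: string, id: string, name: string, config: any) => {
  const Provider = providers[type];
  if (!Provider) throw new Error(`Unknown provider type: ${type}`);
  return new Provider(id, name, config);
};

// e.g. createProvider('transformers', 'transformers-1', 'Transformers', {});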

View File

@@ -0,0 +1,88 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { HuggingFaceTransformersEmbeddings } from '@/lib/huggingfaceTransformer';
interface TransformersConfig {}
const defaultEmbeddingModels: Model[] = [
{
name: 'all-MiniLM-L6-v2',
key: 'Xenova/all-MiniLM-L6-v2',
},
{
name: 'mxbai-embed-large-v1',
key: 'mixedbread-ai/mxbai-embed-large-v1',
},
{
name: 'nomic-embed-text-v1',
key: 'Xenova/nomic-embed-text-v1',
},
];
const providerConfigFields: UIConfigField[] = [];
class TransformersProvider extends BaseModelProvider<TransformersConfig> {
constructor(id: string, name: string, config: TransformersConfig) {
super(id, name, config);
}
async getDefaultModels(): Promise<ModelList> {
return {
embedding: [...defaultEmbeddingModels],
chat: [],
};
}
async getModelList(): Promise<ModelList> {
const defaultModels = await this.getDefaultModels();
const configProvider = getConfiguredModelProviderById(this.id)!;
return {
embedding: [
...defaultModels.embedding,
...configProvider.embeddingModels,
],
chat: [],
};
}
async loadChatModel(key: string): Promise<BaseChatModel> {
throw new Error('Transformers Provider does not support chat models.');
}
async loadEmbeddingModel(key: string): Promise<Embeddings> {
const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key);
if (!exists) {
throw new Error(
'Error Loading OpenAI Embedding Model. Invalid Model Selected.',
);
}
return new HuggingFaceTransformersEmbeddings({
model: key,
});
}
static parseAndValidate(raw: any): TransformersConfig {
return {};
}
static getProviderConfigFields(): UIConfigField[] {
return providerConfigFields;
}
static getProviderMetadata(): ProviderMetadata {
return {
key: 'transformers',
name: 'Transformers',
};
}
}
export default TransformersProvider;
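End to end, the provider maps an embedding model key to a `HuggingFaceTransformersEmbeddings` instance. Illustrative usage, assuming the provider id is present in the server-side config registry (otherwise `getModelList` cannot resolve it):

const provider = new TransformersProvider('transformers', 'Transformers', {});
const embeddings = await provider.loadEmbeddingModel('Xenova/all-MiniLM-L6-v2');
const vectors = await embeddings.embedDocuments(['hello world']); // number[][]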

yarn.lock: 608 changed lines

File diff suppressed because it is too large.