Compare commits

...

20 Commits

Author SHA1 Message Date
ItzCrazyKns
1614cfa5e5 feat(app): add widgets 2025-11-20 14:55:50 +05:30
ItzCrazyKns
036b44611f feat(search): add classifier 2025-11-20 14:55:24 +05:30
ItzCrazyKns
8b515201f3 feat(app): add search types 2025-11-20 14:53:03 +05:30
ItzCrazyKns
cbcb03c7ac feat(llm): update return type to partial 2025-11-20 14:52:41 +05:30
ItzCrazyKns
afc68ca91f feat(ollamaLLM): disable thinking in obj mode 2025-11-20 14:52:24 +05:30
ItzCrazyKns
3cc8882b28 feat(prompts): add classifier prompt 2025-11-20 14:51:49 +05:30
ItzCrazyKns
c3830795cb feat(app): add new session manager 2025-11-20 14:51:17 +05:30
ItzCrazyKns
f44ad973aa feat(types): add llm types 2025-11-18 14:39:43 +05:30
ItzCrazyKns
4bcbdad6cb feat(providers): implement custom classes 2025-11-18 14:39:04 +05:30
ItzCrazyKns
5272c7fd3e feat(models): add new base classes 2025-11-18 14:38:12 +05:30
ItzCrazyKns
657a577ec8 feat(app): enhance UI 2025-11-18 14:37:41 +05:30
ItzCrazyKns
f6dac43d7a feat(types): add message & chunk type 2025-11-18 01:17:19 +05:30
ItzCrazyKns
a00f2231d4 feat(chat-window): remove loading state 2025-11-14 23:17:41 +05:30
ItzCrazyKns
1da9b7655c Merge branch 'canary' into feat/improve-search-architecture 2025-11-14 14:38:58 +05:30
ItzCrazyKns
2edef888a3 Merge branch 'master' into canary 2025-11-14 13:29:22 +05:30
ItzCrazyKns
2dc8078848 Update Exa sponsor image and README styling 2025-11-14 13:23:50 +05:30
ItzCrazyKns
8df81c20cf Update README.md 2025-11-14 13:19:49 +05:30
ItzCrazyKns
34bd02236d Update README.md 2025-11-14 13:17:52 +05:30
ItzCrazyKns
2430376a0c feat(readme): update sponsers 2025-11-14 13:15:59 +05:30
ItzCrazyKns
bd5628b390 feat(package): bump langchain package 2025-11-14 11:45:48 +05:30
34 changed files with 1311 additions and 164 deletions

BIN
.assets/sponsers/exa.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.5 KiB

View File

@@ -49,10 +49,29 @@ Perplexica's development is powered by the generous support of our sponsors. The
<img alt="Warp Terminal" src=".assets/sponsers/warp.png" width="100%">
</a>
**[Warp](https://www.warp.dev/perplexica)** - The AI-powered terminal revolutionizing development workflows
### **✨ [Try Warp - The AI-Powered Terminal →](https://www.warp.dev/perplexica)**
Warp is revolutionizing development workflows with AI-powered features, modern UX, and blazing-fast performance. Used by developers at top companies worldwide.
</div>
---
We'd also like to thank the following partners for their generous support:
<table>
<tr>
<td>
<a href="https://dashboard.exa.ai" target="_blank">
<img src=".assets/sponsers/exa.png" alt="Exa" style="max-width: 8rem; max-height: 8rem; border-radius: .75rem;" />
</a>
</td>
<td>
<a href="https://dashboard.exa.ai">Exa</a> • The Perfect Web Search API for LLMs - web search, crawling, deep research, and answer APIs
</td>
</tr>
</table>
## Installation
There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. Using Docker is highly recommended.

View File

@@ -16,13 +16,14 @@
"@huggingface/transformers": "^3.7.5",
"@iarna/toml": "^2.2.5",
"@icons-pack/react-simple-icons": "^12.3.0",
"@langchain/anthropic": "^1.0.0",
"@langchain/community": "^1.0.0",
"@langchain/core": "^1.0.1",
"@langchain/google-genai": "^1.0.0",
"@langchain/groq": "^1.0.0",
"@langchain/ollama": "^1.0.0",
"@langchain/openai": "^1.0.0",
"@langchain/anthropic": "^1.0.1",
"@langchain/community": "^1.0.3",
"@langchain/core": "^1.0.5",
"@langchain/google-genai": "^1.0.1",
"@langchain/groq": "^1.0.1",
"@langchain/langgraph": "^1.0.1",
"@langchain/ollama": "^1.0.1",
"@langchain/openai": "^1.1.1",
"@langchain/textsplitters": "^1.0.0",
"@tailwindcss/typography": "^0.5.12",
"axios": "^1.8.3",
@@ -33,7 +34,7 @@
"framer-motion": "^12.23.24",
"html-to-text": "^9.0.5",
"jspdf": "^3.0.1",
"langchain": "^1.0.1",
"langchain": "^1.0.4",
"lucide-react": "^0.363.0",
"mammoth": "^1.9.1",
"markdown-to-jsx": "^7.7.2",
@@ -48,7 +49,7 @@
"tailwind-merge": "^2.2.2",
"winston": "^3.17.0",
"yet-another-react-lightbox": "^3.17.2",
"zod": "^3.22.4"
"zod": "^4.1.12"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.12",

View File

@@ -8,7 +8,6 @@ import { Settings } from 'lucide-react';
import Link from 'next/link';
import NextError from 'next/error';
import { useChat } from '@/lib/hooks/useChat';
import Loader from './ui/Loader';
import SettingsButtonMobile from './Settings/SettingsButtonMobile';
export interface BaseMessage {
@@ -52,7 +51,7 @@ export interface File {
}
const ChatWindow = () => {
const { hasError, isReady, notFound, messages } = useChat();
const { hasError, notFound, messages } = useChat();
if (hasError) {
return (
<div className="relative">
@@ -68,24 +67,18 @@ const ChatWindow = () => {
);
}
return isReady ? (
notFound ? (
<NextError statusCode={404} />
) : (
<div>
{messages.length > 0 ? (
<>
<Navbar />
<Chat />
</>
) : (
<EmptyChat />
)}
</div>
)
return notFound ? (
<NextError statusCode={404} />
) : (
<div className="flex flex-row items-center justify-center min-h-screen">
<Loader />
<div>
{messages.length > 0 ? (
<>
<Navbar />
<Chat />
</>
) : (
<EmptyChat />
)}
</div>
);
};

View File

@@ -20,9 +20,9 @@ const Copy = ({
setCopied(true);
setTimeout(() => setCopied(false), 1000);
}}
className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
>
{copied ? <Check size={18} /> : <ClipboardList size={18} />}
{copied ? <Check size={16} /> : <ClipboardList size={16} />}
</button>
);
};

View File

@@ -1,4 +1,4 @@
import { ArrowLeftRight } from 'lucide-react';
import { ArrowLeftRight, Repeat } from 'lucide-react';
const Rewrite = ({
rewrite,
@@ -10,12 +10,11 @@ const Rewrite = ({
return (
<button
onClick={() => rewrite(messageId)}
className="py-2 px-3 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1"
className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1"
>
<ArrowLeftRight size={18} />
<p className="text-xs font-medium">Rewrite</p>
<Repeat size={16} />
</button>
);
};
1;
export default Rewrite;

View File

@@ -10,6 +10,7 @@ import {
StopCircle,
Layers3,
Plus,
CornerDownRight,
} from 'lucide-react';
import Markdown, { MarkdownToJSX } from 'markdown-to-jsx';
import Copy from './MessageActions/Copy';
@@ -122,14 +123,14 @@ const MessageBox = ({
</Markdown>
{loading && isLast ? null : (
<div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4 -mx-2">
<div className="flex flex-row items-center space-x-1">
<div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4">
<div className="flex flex-row items-center -ml-2">
<Rewrite
rewrite={rewrite}
messageId={section.assistantMessage.messageId}
/>
</div>
<div className="flex flex-row items-center space-x-1">
<div className="flex flex-row items-center -mr-2">
<Copy
initialMessage={section.assistantMessage.content}
section={section}
@@ -142,12 +143,12 @@ const MessageBox = ({
start();
}
}}
className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
>
{speechStatus === 'started' ? (
<StopCircle size={18} />
<StopCircle size={16} />
) : (
<Volume2 size={18} />
<Volume2 size={16} />
)}
</button>
</div>
@@ -159,7 +160,7 @@ const MessageBox = ({
section.suggestions.length > 0 &&
section.assistantMessage &&
!loading && (
<div className="mt-8 pt-6 border-t border-light-200/50 dark:border-dark-200/50">
<div className="mt-6">
<div className="flex flex-row items-center space-x-2 mb-4">
<Layers3
className="text-black dark:text-white"
@@ -173,20 +174,24 @@ const MessageBox = ({
{section.suggestions.map(
(suggestion: string, i: number) => (
<div key={i}>
{i > 0 && (
<div className="h-px bg-light-200/40 dark:bg-dark-200/40 mx-3" />
)}
<div className="h-px bg-light-200/40 dark:bg-dark-200/40" />
<button
onClick={() => sendMessage(suggestion)}
className="group w-full px-3 py-4 text-left transition-colors duration-200"
className="group w-full py-4 text-left transition-colors duration-200"
>
<div className="flex items-center justify-between gap-3">
<p className="text-sm text-black/70 dark:text-white/70 group-hover:text-[#24A0ED] transition-colors duration-200 leading-relaxed">
{suggestion}
</p>
<div className="flex flex-row space-x-3 items-center ">
<CornerDownRight
size={17}
className="group-hover:text-sky-400 transition-colors duration-200"
/>
<p className="text-sm text-black/70 dark:text-white/70 group-hover:text-sky-400 transition-colors duration-200 leading-relaxed">
{suggestion}
</p>
</div>
<Plus
size={16}
className="text-black/40 dark:text-white/40 group-hover:text-[#24A0ED] transition-colors duration-200 flex-shrink-0"
className="text-black/40 dark:text-white/40 group-hover:text-sky-400 transition-colors duration-200 flex-shrink-0"
/>
</div>
</button>

View File

@@ -0,0 +1,72 @@
import z from 'zod';
import { ClassifierInput, ClassifierOutput } from '../types';
import { WidgetRegistry } from '../widgets';
import { IntentRegistry } from './intents';
import { getClassifierPrompt } from '@/lib/prompts/search/classifier';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
/**
 * LLM-backed query classifier.
 *
 * Given a user query and its chat history, asks the configured LLM for a
 * structured classification: whether search can be skipped, a standalone
 * reformulation of the query, the matching intents, and any widgets that can
 * answer the query directly.
 */
class Classifier {
  /**
   * Classify a query.
   *
   * @param input - LLM handle, enabled sources, raw query, and chat history.
   * @returns The parsed classification (intents, widgets, skip flag, follow-up).
   */
  async classify(input: ClassifierInput): Promise<ClassifierOutput> {
    const availableIntents = IntentRegistry.getAvailableIntents({
      sources: input.enabledSources,
    });
    const availableWidgets = WidgetRegistry.getAll();

    // The schema is rebuilt per call because the enum/union members depend on
    // which intents and widgets are currently registered and enabled.
    // NOTE(review): z.enum/z.union are fed runtime-built arrays here — verify
    // zod's behavior when zero or one intent/widget is available (some zod
    // versions require non-empty enum options and >=2 union members).
    const classificationSchema = z.object({
      skipSearch: z
        .boolean()
        .describe(
          'Set to true to SKIP search. Skip ONLY when: (1) widgets alone fully answer the query (e.g., weather, stocks, calculator), (2) simple greetings or writing tasks (NOT questions). Set to false for ANY question or information request.',
        ),
      standaloneFollowUp: z
        .string()
        .describe(
          'A self-contained, context-independent reformulation of the user\'s question. Must include all necessary context from chat history, replace pronouns with specific nouns, and be clear enough to answer without seeing the conversation. Keep the same complexity as the original question.',
        ),
      intents: z
        .array(z.enum(availableIntents.map((i) => i.name)))
        .describe(
          'The intent(s) that best describe how to fulfill the user\'s query. Can include multiple intents (e.g., [\'web_search\', \'widget_response\'] for \'weather in NYC and recent news\'). Always include at least one intent when applicable.',
        ),
      widgets: z
        .array(z.union(availableWidgets.map((w) => w.schema)))
        .describe(
          'Widgets that can display structured data to answer (fully or partially) the query. Include all applicable widgets regardless of skipSearch value.',
        ),
      });

    const classifierPrompt = getClassifierPrompt({
      intentDesc: IntentRegistry.getDescriptions({
        sources: input.enabledSources,
      }),
      widgetDesc: WidgetRegistry.getDescriptions(),
    });

    const res = await input.llm.generateObject<
      z.infer<typeof classificationSchema>
    >({
      messages: [
        {
          role: 'system',
          content: classifierPrompt,
        },
        {
          role: 'user',
          content: `<conversation>${formatChatHistoryAsString(input.chatHistory)}</conversation>\n\n<query>${input.query}</query>`,
        },
      ],
      schema: classificationSchema,
    });

    // Reshape each emitted widget object into the WidgetConfig form
    // ({ type, params }). Note: `params` keeps the full widget object,
    // including its own `type` field.
    res.widgets = res.widgets.map((widgetConfig) => {
      return {
        type: widgetConfig.type,
        params: widgetConfig,
      };
    });

    return res as ClassifierOutput;
  }
}

export default Classifier;

View File

@@ -0,0 +1,11 @@
import { Intent } from '../../types';
// Intent: scholarly/academic search. Only offered to the classifier when the
// 'academic' source is enabled in the caller's config.
const academicSearchIntent: Intent = {
  name: 'academic_search',
  description:
    'Use this intent to find scholarly articles, research papers, and academic resources when the user is seeking credible and authoritative information on a specific topic.',
  // Fulfilling this intent requires running an actual search.
  requiresSearch: true,
  enabled: (config) => config.sources.includes('academic'),
};

export default academicSearchIntent;

View File

@@ -0,0 +1,11 @@
import { Intent } from '../../types';
// Intent: community/forum search. Only offered to the classifier when the
// 'discussions' source is enabled in the caller's config.
const discussionSearchIntent: Intent = {
  name: 'discussion_search',
  description:
    'Use this intent to search through discussion forums, community boards, or social media platforms when the user is looking for opinions, experiences, or community-driven information on a specific topic.',
  // Fulfilling this intent requires running an actual search.
  requiresSearch: true,
  enabled: (config) => config.sources.includes('discussions'),
};

export default discussionSearchIntent;

View File

@@ -0,0 +1,14 @@
// Registers every built-in intent with the IntentRegistry as a module-load
// side effect; importing this module once is enough to populate the registry.
import academicSearchIntent from './academicSearch';
import discussionSearchIntent from './discussionSearch';
import IntentRegistry from './registry';
import webSearchIntent from './webSearch';
import widgetResponseIntent from './widgetResponse';
import writingTaskIntent from './writingTask';

IntentRegistry.register(webSearchIntent);
IntentRegistry.register(academicSearchIntent);
IntentRegistry.register(discussionSearchIntent);
IntentRegistry.register(widgetResponseIntent);
IntentRegistry.register(writingTaskIntent);

export { IntentRegistry };

View File

@@ -0,0 +1,29 @@
import { Intent, SearchAgentConfig, SearchSources } from '../../types';
/**
 * Static registry of search intents.
 *
 * Intents are registered once at module load time and queried per request to
 * determine which ones are enabled for the caller's configured sources.
 */
class IntentRegistry {
  private static intents = new Map<string, Intent>();

  /** Register (or overwrite) an intent under its name. */
  static register(intent: Intent) {
    this.intents.set(intent.name, intent);
  }

  /** Look up a single intent by name. */
  static get(name: string): Intent | undefined {
    return this.intents.get(name);
  }

  /** All intents whose `enabled` predicate passes for the given sources. */
  static getAvailableIntents(config: { sources: SearchSources[] }): Intent[] {
    // Materialize the Map iterator before filtering: Iterator.prototype.filter
    // is an ES2024 iterator-helper and is not available in all runtimes the
    // app may target; Array#filter is universally supported.
    return Array.from(this.intents.values()).filter((intent) =>
      intent.enabled(config),
    );
  }

  /** Newline-separated `name: description` pairs for prompt construction. */
  static getDescriptions(config: { sources: SearchSources[] }): string {
    const availableIntents = this.getAvailableIntents(config);

    return availableIntents
      .map((intent) => `${intent.name}: ${intent.description}`)
      .join('\n\n');
  }
}

export default IntentRegistry;

View File

@@ -0,0 +1,11 @@
import { Intent } from '../../types';
// Intent: general web search. Only offered to the classifier when the 'web'
// source is enabled in the caller's config.
const webSearchIntent: Intent = {
  name: 'web_search',
  description:
    'Use this intent to find current information from the web when the user is asking a question or needs up-to-date information that cannot be provided by widgets or other intents.',
  // Fulfilling this intent requires running an actual search.
  requiresSearch: true,
  enabled: (config) => config.sources.includes('web'),
};

export default webSearchIntent;

View File

@@ -0,0 +1,11 @@
import { Intent } from '../../types';
// Intent: answer via widgets (weather, stocks, ...) without searching.
const widgetResponseIntent: Intent = {
  name: 'widget_response',
  description:
    'Use this intent to respond to user queries using available widgets when the required information can be obtained from them.',
  // No search needed; widgets supply the data directly.
  requiresSearch: false,
  // Always available, regardless of which sources are enabled.
  enabled: (config) => true,
};

export default widgetResponseIntent;

View File

@@ -0,0 +1,11 @@
import { Intent } from '../../types';
// Intent: pure writing/assistant tasks (drafting, greetings) with no search.
const writingTaskIntent: Intent = {
  name: 'writing_task',
  description:
    'Use this intent to assist users with writing tasks such as drafting emails, creating documents, or generating content based on their instructions or greetings.',
  // No search needed; the LLM responds directly.
  requiresSearch: false,
  // Always available, regardless of which sources are enabled.
  enabled: (config) => true,
};

export default writingTaskIntent;

View File

@@ -0,0 +1,65 @@
import { EventEmitter } from 'stream';
import z from 'zod';
import BaseLLM from '../../models/base/llm';
import BaseEmbedding from '@/lib/models/base/embedding';
/** Search source categories a caller can enable per request. */
export type SearchSources = 'web' | 'discussions' | 'academic';

/** Configuration for a search agent run: sources plus model handles. */
export type SearchAgentConfig = {
  sources: SearchSources[];
  llm: BaseLLM<any>;
  embedding: BaseEmbedding<any>;
};

/** Input to a search agent invocation. */
// NOTE(review): `Message` is not imported in this file — it appears to be a
// global/ambient type declared elsewhere in the project; confirm.
export type SearchAgentInput = {
  chatHistory: Message[];
  followUp: string;
  config: SearchAgentConfig;
};

/** A classifiable user intent (e.g. web_search, writing_task). */
export interface Intent {
  name: string;
  description: string;
  /** Whether fulfilling this intent requires running a search. */
  requiresSearch: boolean;
  /** Predicate deciding availability given the enabled sources. */
  enabled: (config: { sources: SearchSources[] }) => boolean;
}

/** A widget definition: zod-described params plus an async executor. */
export type Widget<TSchema extends z.ZodObject<any> = z.ZodObject<any>> = {
  name: string;
  description: string;
  schema: TSchema;
  execute: (
    params: z.infer<TSchema>,
    additionalConfig: AdditionalConfig,
  ) => Promise<WidgetOutput>;
};

/** A widget invocation request produced by the classifier. */
export type WidgetConfig = {
  type: string;
  params: Record<string, any>;
};

/** Result of executing a widget. */
export type WidgetOutput = {
  type: string;
  data: any;
};

/** Input to the query classifier. */
export type ClassifierInput = {
  llm: BaseLLM<any>;
  enabledSources: SearchSources[];
  query: string;
  chatHistory: Message[];
};

/** Structured result of classification. */
export type ClassifierOutput = {
  skipSearch: boolean;
  standaloneFollowUp: string;
  intents: string[];
  widgets: WidgetConfig[];
};

/** Shared runtime dependencies handed to widget executors. */
export type AdditionalConfig = {
  llm: BaseLLM<any>;
  // Fix: this field was declared as BaseLLM<any>, but it carries the
  // embedding model (cf. SearchAgentConfig.embedding above).
  embedding: BaseEmbedding<any>;
  emitter: EventEmitter;
};

View File

@@ -0,0 +1,6 @@
// Registers every built-in widget with the WidgetRegistry as a module-load
// side effect; importing this module once is enough to populate the registry.
import WidgetRegistry from './registry';
import weatherWidget from './weatherWidget';

WidgetRegistry.register(weatherWidget);

export { WidgetRegistry };

View File

@@ -0,0 +1,65 @@
import {
AdditionalConfig,
SearchAgentConfig,
Widget,
WidgetConfig,
WidgetOutput,
} from '../types';
/**
 * Static registry of widgets (weather, stocks, ...).
 *
 * Widgets are registered at module load and can be looked up, described for
 * prompting, or executed by name.
 */
class WidgetRegistry {
  private static widgets = new Map<string, Widget>();

  /** Register (or overwrite) a widget under its name. */
  static register(widget: Widget<any>) {
    this.widgets.set(widget.name, widget);
  }

  /** Look up a single widget by name. */
  static get(name: string): Widget | undefined {
    return this.widgets.get(name);
  }

  /** All registered widgets. */
  static getAll(): Widget[] {
    return Array.from(this.widgets.values());
  }

  /** Newline-separated `name: description` pairs for prompt construction. */
  static getDescriptions(): string {
    return Array.from(this.widgets.values())
      .map((widget) => `${widget.name}: ${widget.description}`)
      .join('\n\n');
  }

  /**
   * Execute a single widget by name.
   *
   * @throws Error when no widget is registered under `name`.
   */
  static async execute(
    name: string,
    params: any,
    config: AdditionalConfig,
  ): Promise<WidgetOutput> {
    const widget = this.get(name);

    if (!widget) {
      throw new Error(`Widget with name ${name} not found`);
    }

    return widget.execute(params, config);
  }

  /**
   * Execute several widgets concurrently.
   *
   * Fix: results are now returned in the same order as `widgets`.
   * Previously, outputs were pushed into a shared array as each promise
   * settled, so the output order depended on completion timing and was
   * nondeterministic.
   */
  static async executeAll(
    widgets: WidgetConfig[],
    additionalConfig: AdditionalConfig,
  ): Promise<WidgetOutput[]> {
    return Promise.all(
      widgets.map((widgetConfig) =>
        this.execute(widgetConfig.type, widgetConfig.params, additionalConfig),
      ),
    );
  }
}

export default WidgetRegistry;

View File

@@ -0,0 +1,123 @@
import z from 'zod';
import { Widget } from '../types';
// Parameters the classifier LLM may emit for the weather widget: either a
// human-readable location name OR lat/lon coordinates (never both).
const WeatherWidgetSchema = z.object({
  type: z.literal('weather'),
  location: z
    .string()
    .describe(
      'Human-readable location name (e.g., "New York, NY, USA", "London, UK"). Use this OR lat/lon coordinates, never both. Leave empty string if providing coordinates.',
    ),
  lat: z
    .number()
    .describe(
      'Latitude coordinate in decimal degrees (e.g., 40.7128). Only use when location name is empty.',
    ),
  lon: z
    .number()
    .describe(
      'Longitude coordinate in decimal degrees (e.g., -74.0060). Only use when location name is empty.',
    ),
});

/**
 * Current-weather widget.
 *
 * Resolves a place via OpenStreetMap Nominatim (forward geocoding when a name
 * is given, reverse geocoding when coordinates are given) and fetches current
 * conditions from the Open-Meteo forecast API.
 */
const weatherWidget = {
  name: 'weather',
  description:
    'Provides current weather information for a specified location. It can return details such as temperature, humidity, wind speed, and weather conditions. It needs either a location name or latitude/longitude coordinates to function.',
  schema: WeatherWidgetSchema,
  execute: async (params, _) => {
    // Guard: need at least one way to resolve a place.
    if (
      params.location === '' &&
      (params.lat === undefined || params.lon === undefined)
    ) {
      throw new Error(
        'Either location name or both latitude and longitude must be provided.',
      );
    }

    if (params.location !== '') {
      // Forward-geocode the location name to coordinates (first match only).
      const openStreetMapUrl = `https://nominatim.openstreetmap.org/search?q=${encodeURIComponent(params.location)}&format=json&limit=1`;

      const locationRes = await fetch(openStreetMapUrl, {
        headers: {
          'User-Agent': 'Perplexica',
          'Content-Type': 'application/json',
        },
      });

      const data = await locationRes.json();
      const location = data[0];

      if (!location) {
        throw new Error(
          `Could not find coordinates for location: ${params.location}`,
        );
      }

      const weatherRes = await fetch(
        `https://api.open-meteo.com/v1/forecast?latitude=${location.lat}&longitude=${location.lon}&current_weather=true`,
        {
          headers: {
            'User-Agent': 'Perplexica',
            'Content-Type': 'application/json',
          },
        },
      );

      const weatherData = await weatherRes.json();

      /* Minimal payload for now, just to verify the backend works; more
         fields will likely be returned once the frontend consumes this. */
      return {
        type: 'weather',
        data: {
          location: params.location,
          latitude: location.lat,
          longitude: location.lon,
          weather: weatherData.current_weather,
        },
      };
    } else if (params.lat !== undefined && params.lon !== undefined) {
      // Coordinates supplied: fetch weather and reverse-geocode concurrently.
      const [weatherRes, locationRes] = await Promise.all([
        fetch(
          `https://api.open-meteo.com/v1/forecast?latitude=${params.lat}&longitude=${params.lon}&current_weather=true`,
          {
            headers: {
              'User-Agent': 'Perplexica',
              'Content-Type': 'application/json',
            },
          },
        ),
        fetch(
          `https://nominatim.openstreetmap.org/reverse?lat=${params.lat}&lon=${params.lon}&format=json`,
          {
            headers: {
              'User-Agent': 'Perplexica',
              'Content-Type': 'application/json',
            },
          },
        ),
      ]);

      const weatherData = await weatherRes.json();
      const locationData = await locationRes.json();

      return {
        type: 'weather',
        data: {
          location: locationData.display_name,
          latitude: params.lat,
          longitude: params.lon,
          weather: weatherData.current_weather,
        },
      };
    }

    // Unreachable given the guard above (location empty implies both
    // coordinates are defined), but keeps the return total for the compiler.
    return {
      type: 'weather',
      data: null,
    };
  },
} satisfies Widget<typeof WeatherWidgetSchema>;

export default weatherWidget;

View File

@@ -0,0 +1,7 @@
/**
 * Abstract base class for embedding model providers.
 *
 * CONFIG is the provider-specific configuration shape (API key, model name,
 * base URL, etc.).
 */
abstract class BaseEmbedding<CONFIG> {
  constructor(protected config: CONFIG) {}

  // Embed raw strings; returns one vector per input text.
  abstract embedText(texts: string[]): Promise<number[][]>;

  // Embed document chunks. NOTE(review): `Chunk` is not imported here —
  // assumed to be a global/ambient type declared project-wide; confirm.
  abstract embedChunks(chunks: Chunk[]): Promise<number[][]>;
}

export default BaseEmbedding;

View File

@@ -0,0 +1,22 @@
import {
GenerateObjectInput,
GenerateOptions,
GenerateTextInput,
GenerateTextOutput,
StreamTextOutput,
} from '../types';
/**
 * Abstract base class for chat LLM providers.
 *
 * CONFIG is the provider-specific configuration shape (API key, model name,
 * base URL, generation options, etc.).
 */
abstract class BaseLLM<CONFIG> {
  constructor(protected config: CONFIG) {}

  // Merge additional generation options into this instance and return it,
  // enabling fluent chaining.
  abstract withOptions(options: GenerateOptions): this;

  // Single-shot text generation.
  abstract generateText(input: GenerateTextInput): Promise<GenerateTextOutput>;

  // Streaming text generation; yields incremental chunks.
  abstract streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput>;

  // Structured generation: produce an object conforming to input.schema.
  abstract generateObject<T>(input: GenerateObjectInput): Promise<T>;

  // Streaming structured generation; yields progressively filled partials.
  abstract streamObject<T>(
    input: GenerateObjectInput,
  ): AsyncGenerator<Partial<T>>;
}

export default BaseLLM;

View File

@@ -1,7 +1,9 @@
import { Embeddings } from '@langchain/core/embeddings';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import { ModelList, ProviderMetadata } from '../types';
import { UIConfigField } from '@/lib/config/types';
import BaseLLM from './llm';
import BaseEmbedding from './embedding';
abstract class BaseModelProvider<CONFIG> {
constructor(
@@ -11,8 +13,8 @@ abstract class BaseModelProvider<CONFIG> {
) {}
abstract getDefaultModels(): Promise<ModelList>;
abstract getModelList(): Promise<ModelList>;
abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;
abstract loadChatModel(modelName: string): Promise<BaseLLM<any>>;
abstract loadEmbeddingModel(modelName: string): Promise<BaseEmbedding<any>>;
static getProviderConfigFields(): UIConfigField[] {
throw new Error('Method not implemented.');
}

View File

@@ -1,27 +1,11 @@
import { ModelProviderUISection } from '@/lib/config/types';
import { ProviderConstructor } from './baseProvider';
import { ProviderConstructor } from '../base/provider';
import OpenAIProvider from './openai';
import OllamaProvider from './ollama';
import TransformersProvider from './transformers';
import AnthropicProvider from './anthropic';
import GeminiProvider from './gemini';
import GroqProvider from './groq';
import DeepSeekProvider from './deepseek';
import LMStudioProvider from './lmstudio';
import LemonadeProvider from './lemonade';
import AimlProvider from '@/lib/models/providers/aiml';
export const providers: Record<string, ProviderConstructor<any>> = {
openai: OpenAIProvider,
ollama: OllamaProvider,
transformers: TransformersProvider,
anthropic: AnthropicProvider,
gemini: GeminiProvider,
groq: GroqProvider,
deepseek: DeepSeekProvider,
aiml: AimlProvider,
lmstudio: LMStudioProvider,
lemonade: LemonadeProvider,
};
export const getModelProvidersUIConfigSection =

View File

@@ -1,10 +1,11 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOllama, OllamaEmbeddings } from '@langchain/ollama';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import BaseModelProvider from '../../base/provider';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseLLM from '../../base/llm';
import BaseEmbedding from '../../base/embedding';
import OllamaLLM from './ollamaLLM';
import OllamaEmbedding from './ollamaEmbedding';
interface OllamaConfig {
baseURL: string;
@@ -76,7 +77,7 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
};
}
async loadChatModel(key: string): Promise<BaseChatModel> {
async loadChatModel(key: string): Promise<BaseLLM<any>> {
const modelList = await this.getModelList();
const exists = modelList.chat.find((m) => m.key === key);
@@ -87,14 +88,13 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
);
}
return new ChatOllama({
temperature: 0.7,
return new OllamaLLM({
baseURL: this.config.baseURL,
model: key,
baseUrl: this.config.baseURL,
});
}
async loadEmbeddingModel(key: string): Promise<Embeddings> {
async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key);
@@ -104,9 +104,9 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
);
}
return new OllamaEmbeddings({
return new OllamaEmbedding({
model: key,
baseUrl: this.config.baseURL,
baseURL: this.config.baseURL,
});
}

View File

@@ -0,0 +1,39 @@
import { Ollama } from 'ollama';
import BaseEmbedding from '../../base/embedding';
type OllamaConfig = {
  model: string;
  baseURL?: string;
};

/**
 * Embedding provider backed by an Ollama server.
 *
 * Falls back to the default local Ollama address when no baseURL is given.
 */
class OllamaEmbedding extends BaseEmbedding<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);

    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  /** Embed raw strings; one vector per input, in input order. */
  async embedText(texts: string[]): Promise<number[][]> {
    const response = await this.ollamaClient.embed({
      input: texts,
      model: this.config.model,
    });

    return response.embeddings;
  }

  /**
   * Embed chunks by delegating to embedText on their content — removes the
   * previously duplicated request logic.
   * NOTE(review): `Chunk` is assumed to be a global/ambient type — confirm.
   */
  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    return this.embedText(chunks.map((c) => c.content));
  }
}

export default OllamaEmbedding;

View File

@@ -0,0 +1,151 @@
import z from 'zod';
import BaseLLM from '../../base/llm';
import {
GenerateObjectInput,
GenerateOptions,
GenerateTextInput,
GenerateTextOutput,
StreamTextOutput,
} from '../../types';
import { Ollama } from 'ollama';
import { parse } from 'partial-json';
type OllamaConfig = {
  baseURL: string;
  model: string;
  options?: GenerateOptions;
};

/**
 * Chat LLM provider backed by an Ollama server.
 *
 * Falls back to the default local Ollama address when no baseURL is given.
 */
class OllamaLLM extends BaseLLM<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);

    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  /**
   * Merge generation options into this instance's config and return it.
   * NOTE(review): options merged here persist on the instance across
   * subsequent calls (per-call options are never reset) — confirm intended.
   */
  withOptions(options: GenerateOptions) {
    this.config.options = {
      ...this.config.options,
      ...options,
    };

    return this;
  }

  /** Map the configured GenerateOptions onto Ollama request options. */
  private getRequestOptions() {
    return {
      top_p: this.config.options?.topP,
      temperature: this.config.options?.temperature,
      num_predict: this.config.options?.maxTokens,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      stop: this.config.options?.stopSequences,
    };
  }

  /** Single-shot chat completion. */
  async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
    this.withOptions(input.options || {});

    const res = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      options: this.getRequestOptions(),
    });

    return {
      content: res.message.content,
      additionalInfo: {
        reasoning: res.message.thinking,
      },
    };
  }

  /** Streaming chat completion; yields one chunk per server event. */
  async *streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput> {
    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      stream: true,
      options: this.getRequestOptions(),
    });

    for await (const chunk of stream) {
      yield {
        contentChunk: chunk.message.content,
        done: chunk.done,
        additionalInfo: {
          reasoning: chunk.message.thinking,
        },
      };
    }
  }

  /**
   * Structured generation constrained by a JSON schema derived from
   * input.schema. Thinking is disabled and temperature pinned to 0 for
   * deterministic, schema-conformant output.
   *
   * @throws Error when the model output cannot be parsed or validated.
   */
  async generateObject<T>(input: GenerateObjectInput): Promise<T> {
    this.withOptions(input.options || {});

    const response = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      think: false,
      options: {
        ...this.getRequestOptions(),
        temperature: 0,
      },
    });

    try {
      return input.schema.parse(JSON.parse(response.message.content)) as T;
    } catch (err) {
      throw new Error(`Error parsing response from Ollama: ${err}`);
    }
  }

  /**
   * Streaming structured generation; yields progressively complete partials.
   *
   * Fixes vs. previous implementation:
   * - Return type is AsyncGenerator<Partial<T>>, matching the BaseLLM
   *   contract (mid-stream values are incomplete objects, not full Ts).
   * - A chunk whose accumulated text cannot yet be parsed as partial JSON is
   *   skipped instead of yielding `{}`, which would have wiped out
   *   previously-yielded partial state for consumers.
   * - Renamed the misspelled `recievedObj` accumulator.
   */
  async *streamObject<T>(
    input: GenerateObjectInput,
  ): AsyncGenerator<Partial<T>> {
    let receivedText: string = '';

    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      stream: true,
      think: false,
      options: this.getRequestOptions(),
    });

    for await (const chunk of stream) {
      receivedText += chunk.message.content;

      try {
        yield parse(receivedText) as Partial<T>;
      } catch (err) {
        // Not yet parseable even as partial JSON; wait for more chunks
        // rather than emitting a misleading empty object.
        console.log('Error parsing partial object from Ollama:', err);
      }
    }
  }
}

export default OllamaLLM;

View File

@@ -1,10 +1,13 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Model, ModelList, ProviderMetadata } from '../types';
import BaseModelProvider from './baseProvider';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { Embeddings } from '@langchain/core/embeddings';
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import OpenAIEmbedding from './openaiEmbedding';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import OpenAILLM from './openaiLLM';
interface OpenAIConfig {
apiKey: string;
@@ -145,7 +148,7 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
};
}
async loadChatModel(key: string): Promise<BaseChatModel> {
async loadChatModel(key: string): Promise<BaseLLM<any>> {
const modelList = await this.getModelList();
const exists = modelList.chat.find((m) => m.key === key);
@@ -156,17 +159,14 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
);
}
return new ChatOpenAI({
return new OpenAILLM({
apiKey: this.config.apiKey,
temperature: 0.7,
model: key,
configuration: {
baseURL: this.config.baseURL,
},
baseURL: this.config.baseURL,
});
}
async loadEmbeddingModel(key: string): Promise<Embeddings> {
async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
const modelList = await this.getModelList();
const exists = modelList.embedding.find((m) => m.key === key);
@@ -176,12 +176,10 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
);
}
return new OpenAIEmbeddings({
return new OpenAIEmbedding({
apiKey: this.config.apiKey,
model: key,
configuration: {
baseURL: this.config.baseURL,
},
baseURL: this.config.baseURL,
});
}

View File

@@ -0,0 +1,41 @@
import OpenAI from 'openai';
import BaseEmbedding from '../../base/embedding';
type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
};

/**
 * Embedding model backed by the OpenAI embeddings API.
 *
 * Wraps an `OpenAI` client configured from {@link OpenAIConfig}; the
 * `model` field selects the embedding model for every request.
 */
class OpenAIEmbedding extends BaseEmbedding<OpenAIConfig> {
  openAIClient: OpenAI;

  constructor(protected config: OpenAIConfig) {
    super(config);

    this.openAIClient = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL,
    });
  }

  /**
   * Embeds raw text strings.
   *
   * @param texts - Texts to embed. An empty array short-circuits to `[]`
   *   because the OpenAI API rejects an empty `input`.
   * @returns One embedding vector per input text, in input order.
   */
  async embedText(texts: string[]): Promise<number[][]> {
    if (texts.length === 0) return [];

    const response = await this.openAIClient.embeddings.create({
      model: this.config.model,
      input: texts,
    });

    return response.data.map((embedding) => embedding.embedding);
  }

  /**
   * Embeds document chunks by their `content` field.
   *
   * Delegates to {@link embedText} so both entry points share a single
   * request path (and the same empty-input guard).
   *
   * @param chunks - Chunks whose text content is embedded.
   * @returns One embedding vector per chunk, in input order.
   */
  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    return this.embedText(chunks.map((c) => c.content));
  }
}

export default OpenAIEmbedding;

View File

@@ -0,0 +1,163 @@
import OpenAI from 'openai';
import BaseLLM from '../../base/llm';
import { zodTextFormat, zodResponseFormat } from 'openai/helpers/zod';
import {
GenerateObjectInput,
GenerateOptions,
GenerateTextInput,
GenerateTextOutput,
StreamTextOutput,
} from '../../types';
import { parse } from 'partial-json';
type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
  options?: GenerateOptions;
};

/**
 * LLM backed by the OpenAI API.
 *
 * Text generation, text streaming and one-shot object generation use the
 * chat completions endpoint; object streaming uses the Responses API so
 * partial JSON deltas can be surfaced as they arrive.
 */
class OpenAILLM extends BaseLLM<OpenAIConfig> {
  openAIClient: OpenAI;

  constructor(protected config: OpenAIConfig) {
    super(config);

    this.openAIClient = new OpenAI({
      apiKey: this.config.apiKey,
      baseURL: this.config.baseURL || 'https://api.openai.com/v1',
    });
  }

  /**
   * Merges `options` into the instance config (later values win) and
   * returns `this` for chaining.
   *
   * NOTE(review): this mutates the shared config, so per-call options
   * persist across subsequent calls on the same instance.
   */
  withOptions(options: GenerateOptions) {
    this.config.options = {
      ...this.config.options,
      ...options,
    };
    return this;
  }

  /**
   * Generates a complete (non-streamed) chat completion.
   *
   * @param input - Messages plus optional per-call generation options.
   * @returns The assistant message content and the finish reason.
   * @throws When the API returns no choices.
   */
  async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
    this.withOptions(input.options || {});

    const response = await this.openAIClient.chat.completions.create({
      model: this.config.model,
      messages: input.messages,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: this.config.options?.temperature ?? 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
    });

    if (response.choices && response.choices.length > 0) {
      return {
        content: response.choices[0].message.content!,
        additionalInfo: {
          finishReason: response.choices[0].finish_reason,
        },
      };
    }

    throw new Error('No response from OpenAI');
  }

  /**
   * Streams a chat completion chunk by chunk.
   *
   * Yields one {@link StreamTextOutput} per delta; `done` flips to true on
   * the chunk that carries a non-null finish reason.
   */
  async *streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput> {
    this.withOptions(input.options || {});

    const stream = await this.openAIClient.chat.completions.create({
      model: this.config.model,
      messages: input.messages,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: this.config.options?.temperature ?? 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      stream: true,
    });

    for await (const chunk of stream) {
      if (chunk.choices && chunk.choices.length > 0) {
        yield {
          contentChunk: chunk.choices[0].delta.content || '',
          done: chunk.choices[0].finish_reason !== null,
          additionalInfo: {
            finishReason: chunk.choices[0].finish_reason,
          },
        };
      }
    }
  }

  /**
   * Generates a single JSON object validated against `input.schema`.
   *
   * @throws When the API returns no choices, or when the parsed payload
   *   fails schema validation.
   */
  async generateObject<T>(input: GenerateObjectInput): Promise<T> {
    this.withOptions(input.options || {});

    const response = await this.openAIClient.chat.completions.parse({
      messages: input.messages,
      model: this.config.model,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: this.config.options?.temperature ?? 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      response_format: zodResponseFormat(input.schema, 'object'),
    });

    if (response.choices && response.choices.length > 0) {
      try {
        return input.schema.parse(response.choices[0].message.parsed) as T;
      } catch (err) {
        throw new Error(`Error parsing response from OpenAI: ${err}`);
      }
    }

    throw new Error('No response from OpenAI');
  }

  /**
   * Streams a structured object via the Responses API.
   *
   * Accumulates output-text deltas and yields the best-effort partial
   * parse after every delta; the final `response.output_text.done` chunk
   * is parsed strictly and yielded as the complete object.
   */
  async *streamObject<T>(input: GenerateObjectInput): AsyncGenerator<T> {
    let receivedObj: string = '';

    this.withOptions(input.options || {});

    const stream = this.openAIClient.responses.stream({
      model: this.config.model,
      input: input.messages,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: this.config.options?.temperature ?? 1.0,
      top_p: this.config.options?.topP,
      // The Responses API names this `max_output_tokens`; the chat
      // completions name `max_completion_tokens` is not accepted here.
      max_output_tokens: this.config.options?.maxTokens,
      // NOTE(review): `stop`, `frequency_penalty` and `presence_penalty`
      // are chat-completions-only parameters and are rejected by the
      // Responses API, so they are intentionally not forwarded here.
      text: {
        format: zodTextFormat(input.schema, 'object'),
      },
    });

    for await (const chunk of stream) {
      if (chunk.type === 'response.output_text.delta' && chunk.delta) {
        receivedObj += chunk.delta;

        try {
          yield parse(receivedObj) as T;
        } catch (err) {
          // Partial JSON may be momentarily unparsable mid-stream; emit
          // an empty object rather than aborting the stream.
          console.log('Error parsing partial object from OpenAI:', err);
          yield {} as T;
        }
      } else if (chunk.type === 'response.output_text.done' && chunk.text) {
        try {
          yield parse(chunk.text) as T;
        } catch (err) {
          throw new Error(`Error parsing response from OpenAI: ${err}`);
        }
      }
    }
  }
}

export default OpenAILLM;

View File

@@ -1,3 +1,5 @@
import z from 'zod';
type Model = {
name: string;
key: string;
@@ -25,10 +27,59 @@ type ModelWithProvider = {
providerId: string;
};
// Sampling / decoding options shared by every LLM generate call.
// All fields are optional; providers apply their own defaults.
type GenerateOptions = {
  temperature?: number;
  maxTokens?: number; // upper bound on generated tokens
  topP?: number; // nucleus-sampling probability mass
  stopSequences?: string[]; // generation halts when one is produced
  frequencyPenalty?: number;
  presencePenalty?: number;
};

// Input for plain text generation (shared by generateText/streamText).
type GenerateTextInput = {
  messages: Message[];
  options?: GenerateOptions;
};

// Result of a one-shot text generation call.
type GenerateTextOutput = {
  content: string;
  additionalInfo?: Record<string, any>; // provider extras, e.g. finish reason
};

// One incremental chunk of a streamed text generation.
type StreamTextOutput = {
  contentChunk: string;
  additionalInfo?: Record<string, any>;
  done?: boolean; // true on the final chunk
};

// Input for structured-object generation; `schema` validates the output.
type GenerateObjectInput = {
  schema: z.ZodTypeAny;
  messages: Message[];
  options?: GenerateOptions;
};

// Result of a one-shot object generation call.
type GenerateObjectOutput<T> = {
  object: T;
  additionalInfo?: Record<string, any>;
};

// One incremental chunk of a streamed object generation; the partial
// object grows toward the full T as chunks arrive.
type StreamObjectOutput<T> = {
  objectChunk: Partial<T>;
  additionalInfo?: Record<string, any>;
  done?: boolean;
};

export type {
  Model,
  ModelList,
  ProviderMetadata,
  MinimalProvider,
  ModelWithProvider,
  GenerateOptions,
  GenerateTextInput,
  GenerateTextOutput,
  StreamTextOutput,
  GenerateObjectInput,
  GenerateObjectOutput,
  StreamObjectOutput,
};

View File

@@ -0,0 +1,176 @@
/**
 * Builds the system prompt for the query-classifier LLM call.
 *
 * The classifier decides whether search can be skipped, rewrites the
 * follow-up into a standalone question, and picks intents and widgets.
 *
 * @param input.intentDesc - Description of the intents available to the
 *   classifier, interpolated into the prompt's intent-selection rules.
 * @param input.widgetDesc - Description of the available widgets,
 *   interpolated into the prompt's widget-selection rules.
 * @returns The full prompt text (runtime string — do not reformat).
 */
export const getClassifierPrompt = (input: {
  intentDesc: string;
  widgetDesc: string;
}) => {
  return `
<role>
You are an expert query classifier for an intelligent search agent. Your task is to analyze user queries and determine the optimal way to answer them—selecting the right intent(s) and widgets.
</role>
<task>
Given a conversation history and follow-up question, you must:
1. Determine if search should be skipped (skipSearch: boolean)
2. Generate a standalone, self-contained version of the question (standaloneFollowUp: string)
3. Identify the intent(s) that describe how to fulfill the query (intent: array)
4. Select appropriate widgets (widgets: array)
</task>
<critical_decision_rule>
**THE MOST IMPORTANT RULE**: skipSearch should be TRUE only in TWO cases:
1. Widget-only queries (weather, stocks, calculator)
2. Greetings or simple writing tasks (NOT questions)
**DEFAULT TO skipSearch: false** for everything else, including:
- Any question ("what is", "how does", "explain", "tell me about")
- Any request for information or facts
- Anything you're unsure about
Ask yourself: "Is the user ASKING about something or requesting INFORMATION?"
- YES → skipSearch: false (use web_search)
- NO (just greeting or simple writing) → skipSearch: true
</critical_decision_rule>
<skip_search_decision_tree>
Follow this decision tree IN ORDER:
1. **Widget-Only Queries** → skipSearch: TRUE, intent: ['widget_response']
- Weather queries: "weather in NYC", "temperature in Paris", "is it raining in Seattle"
- Stock queries: "AAPL stock price", "how is Tesla doing", "MSFT stock"
- Calculator queries: "what is 25% of 80", "calculate 15*23", "sqrt(144)"
- These are COMPLETE answers—no search needed
2. **Writing/Greeting Tasks** → skipSearch: TRUE, intent: ['writing_task']
- ONLY for greetings and simple writing:
- Greetings: "hello", "hi", "how are you", "thanks", "goodbye"
- Simple writing needing NO facts: "write a thank you email", "draft a birthday message", "compose a poem"
- NEVER for: questions, "what is X", "how does X work", explanations, definitions, facts, code help
- If user is ASKING about something (not requesting writing), use web_search
3. **Image Display Queries** → skipSearch: FALSE, intent: ['image_preview']
- "Show me images of cats"
- "Pictures of the Eiffel Tower"
- "Visual examples of modern architecture"
- Requests for images to visualize something
4. **Widget + Additional Info** → skipSearch: FALSE, intent: ['web_search', 'widget_response']
- "weather in NYC and best things to do there"
- "AAPL stock and recent Apple news"
- "calculate my mortgage and explain how interest works"
5. **Pure Search Queries** → skipSearch: FALSE
- Default to web_search for general questions
- Use discussions_search when user explicitly mentions Reddit, forums, opinions, experiences
- Use academic_search when user explicitly mentions research, papers, studies, scientific
- Can combine multiple search intents when appropriate
6. **Fallback when web_search unavailable** → skipSearch: TRUE, intent: ['writing_task'] or []
- If no search intents are available and no widgets apply
- Set skipSearch to true and use writing_task or empty intent
</skip_search_decision_tree>
<examples>
Example 1: Widget-only query
Query: "What is the weather in New York?"
Reasoning: User wants current weather → weather widget provides this completely
Output: skipSearch: true, intent: ['widget_response'], widgets: [weather widget for New York]
Example 2: Widget-only query
Query: "AAPL stock price"
Reasoning: User wants stock price → stock_ticker widget provides this completely
Output: skipSearch: true, intent: ['widget_response'], widgets: [stock_ticker for AAPL]
Example 3: Widget + search query
Query: "What's the weather in NYC and what are some good outdoor activities?"
Reasoning: Weather widget handles weather, but outdoor activities need web search
Output: skipSearch: false, intent: ['web_search', 'widget_response'], widgets: [weather widget for NYC]
Example 4: Pure search query
Query: "What are the latest developments in AI?"
Reasoning: No widget applies, needs current web information
Output: skipSearch: false, intent: ['web_search'], widgets: []
Example 5: Writing task (greeting/simple writing only)
Query: "Write me a thank you email for a job interview"
Reasoning: Simple writing task needing no external facts → writing_task
Output: skipSearch: true, intent: ['writing_task'], widgets: []
Example 5b: Question about something - ALWAYS needs search
Query: "What is Kimi K2?"
Reasoning: User is ASKING about something → needs web search for accurate info
Output: skipSearch: false, intent: ['web_search'], widgets: []
Example 5c: Another question - needs search
Query: "Explain how photosynthesis works"
Reasoning: User is ASKING for explanation → needs web search
Output: skipSearch: false, intent: ['web_search'], widgets: []
Example 6: Image display
Query: "Show me images of cats"
Reasoning: User wants to see images → requires image search
Output: skipSearch: false, intent: ['image_preview'], widgets: []
Example 7: Multiple search sources
Query: "What does the research say about meditation benefits?"
Reasoning: Benefits from both academic papers and web articles
Output: skipSearch: false, intent: ['academic_search', 'web_search'], widgets: []
Example 8: Discussions search
Query: "What do people on Reddit think about the new iPhone?"
Reasoning: User explicitly wants forum/community opinions → discussions_search
Output: skipSearch: false, intent: ['discussions_search'], widgets: []
Example 9: Academic search only
Query: "Find scientific papers on climate change effects"
Reasoning: User explicitly wants academic/research papers
Output: skipSearch: false, intent: ['academic_search'], widgets: []
</examples>
<standalone_follow_up_guidelines>
Transform the follow-up into a self-contained question:
- Include ALL necessary context from chat history
- Replace pronouns (it, they, this, that) with specific nouns
- Replace references ("the previous one", "what you mentioned") with actual content
- Preserve the original complexity—don't over-elaborate simple questions
- The question should be answerable without seeing the conversation
</standalone_follow_up_guidelines>
<intent_selection_rules>
Available intents:
${input.intentDesc}
Rules:
- Include at least one intent when applicable
- For questions/information requests:
- Default to web_search unless user explicitly requests another source
- Use discussions_search when user mentions: Reddit, forums, opinions, experiences, "what do people think"
- Use academic_search when user mentions: research, papers, studies, scientific, scholarly
- Can combine intents (e.g., ['academic_search', 'web_search'])
- If web_search is NOT in available intents and query needs search:
- Check if discussions_search or academic_search applies
- If no search intent available and no widgets: use writing_task or empty array []
- private_search: ONLY when user provides specific URLs/documents
- widget_response: when widgets fully answer the query
- writing_task: ONLY for greetings and simple writing (never for questions)
</intent_selection_rules>
<widget_selection_rules>
Available widgets:
${input.widgetDesc}
Rules:
- Include ALL applicable widgets regardless of skipSearch value
- Each widget type can only be included once
- Widgets provide structured, real-time data that enhances any response
</widget_selection_rules>
<output_format>
Your classification must be precise and consistent:
{
"skipSearch": <true|false>,
"standaloneFollowUp": "<self-contained question>",
"intent": [<array of selected intents>],
"widgets": [<array of selected widgets>]
}
</output_format>
`;
};

45
src/lib/session.ts Normal file
View File

@@ -0,0 +1,45 @@
import { EventEmitter } from 'stream';
/* todo implement history saving and better artifact typing and handling */

/**
 * In-memory registry of live sessions plus a per-session event bus for
 * streaming artifacts to subscribers.
 *
 * Sessions register themselves in the static map on construction and can
 * be looked up by id. Events emitted: 'addArtifact', 'updateArtifact',
 * plus anything passed through `emit`.
 */
class SessionManager {
  private static sessions = new Map<string, SessionManager>();

  readonly id: string;
  private artifacts = new Map<string, Artifact>();
  private emitter = new EventEmitter();

  constructor() {
    this.id = crypto.randomUUID();
    // Register the session so the static lookups below can find it.
    // Without this, getSession()/getAllSessions() always came back empty.
    SessionManager.sessions.set(this.id, this);
  }

  /** Looks up a session by id; undefined when none is registered. */
  static getSession(id: string): SessionManager | undefined {
    return this.sessions.get(id);
  }

  /** All currently registered sessions. */
  static getAllSessions(): SessionManager[] {
    return Array.from(this.sessions.values());
  }

  /**
   * Unregisters a session so it can be garbage-collected; call when the
   * session ends, otherwise the static map grows without bound.
   */
  static removeSession(id: string) {
    this.sessions.delete(id);
  }

  /**
   * Subscribes to this session's events ('addArtifact', 'updateArtifact',
   * or custom events passed to `emit`). Without a subscription API the
   * private emitter's events were unobservable.
   */
  on(event: string, listener: (data: any) => void) {
    this.emitter.on(event, listener);
  }

  /** Emits an arbitrary event on this session's bus. */
  emit(event: string, data: any) {
    this.emitter.emit(event, data);
  }

  /** Stores a new artifact and notifies subscribers via 'addArtifact'. */
  emitArtifact(artifact: Artifact) {
    this.artifacts.set(artifact.id, artifact);
    this.emitter.emit('addArtifact', artifact);
  }

  /**
   * Appends `data` to an existing artifact and emits 'updateArtifact'.
   * Append semantics depend on the artifact's data type: string concat,
   * array push, or object merge. Unknown artifact ids are ignored.
   */
  appendToArtifact(artifactId: string, data: any) {
    const artifact = this.artifacts.get(artifactId);

    if (artifact) {
      if (typeof artifact.data === 'string') {
        artifact.data += data;
      } else if (Array.isArray(artifact.data)) {
        artifact.data.push(data);
      } else if (typeof artifact.data === 'object') {
        Object.assign(artifact.data, data);
      }

      this.emitter.emit('updateArtifact', artifact);
    }
  }
}

export default SessionManager;

15
src/lib/types.ts Normal file
View File

@@ -0,0 +1,15 @@
// Chat message exchanged with an LLM (roles mirror the OpenAI-style API).
type Message = {
  role: 'user' | 'assistant' | 'system';
  content: string;
};

// A piece of content (e.g. a retrieved/split document fragment) plus
// arbitrary metadata carried alongside it.
type Chunk = {
  content: string;
  metadata: Record<string, any>;
};

// Streamable unit of output tracked per session. `data` may be a string,
// array, or object — appends are handled per type by the session manager.
type Artifact = {
  id: string;
  type: string;
  data: any;
};

141
yarn.lock
View File

@@ -746,19 +746,19 @@
"@jridgewell/resolve-uri" "^3.1.0"
"@jridgewell/sourcemap-codec" "^1.4.14"
"@langchain/anthropic@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.0.tgz#48535c5682851bf8fddcf37aa7ca78d4d93da932"
integrity sha512-Lud/FrkFmXMYW5R9y0FC+RGdgjBBVQ2JAnG3A8E1I4+sqv5JgJttw3HKRpFkyBUSyacs6LMfSn5dbJ6TT9nMiQ==
"@langchain/anthropic@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.1.tgz#a9f836b11ecbce282fc2afb8d707c52fd37711c4"
integrity sha512-yVKePAT+nNHtybyyPlWqiq6lqcoDlIuMgL9B4WMEU5gbmzL170iodiqcgcZNFQLOC1V2wCOzywq6Zr0kB24AFg==
dependencies:
"@anthropic-ai/sdk" "^0.65.0"
"@langchain/classic@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.0.tgz#acbc15eebba03499cf93e73d2c93703a3da0a46e"
integrity sha512-darZFvO5g5e3TqZ4rvZ938F94D4a34v2ZdWfyipmyu7WB4RXMshmYtWCp98o4ec3bfRD9S4+oHMmaPcnk5cs5A==
"@langchain/classic@1.0.3":
version "1.0.3"
resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.3.tgz#92482cb4cb8692407b4ecde0df312f035934472f"
integrity sha512-XyoaiJSi4y7SzrZMCb3DdDfC+M3gqIQpVH2cOCh9xQf4244jNrncpLXF/MwOJYWxzTsjfcCAHIbFJ0kSH5nqmg==
dependencies:
"@langchain/openai" "1.0.0-alpha.3"
"@langchain/openai" "1.1.1"
"@langchain/textsplitters" "1.0.0"
handlebars "^4.7.8"
js-yaml "^4.1.0"
@@ -771,24 +771,24 @@
optionalDependencies:
langsmith "^0.3.64"
"@langchain/community@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.0.tgz#8e587605b7c981882e20281aa9e644a166620145"
integrity sha512-CM4vUZHaFHq8HpWBMIWPO5bo/rmRPJ1/iaJk7s8CghkkQ0WLaZzDtoG/wJKJZMDJOUVCtZKTw+TytlGu00/9dg==
"@langchain/community@^1.0.3":
version "1.0.3"
resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.3.tgz#278c82eee22ff37b120e182b07b7c23ffc6786ab"
integrity sha512-86L7qooSY8Fh5Sf2Tu/X8PvDJqvEXohyZUGusuv0XtnWGivwtecBm0vEbVPkLF07I2ZMtyAGzHJOblbveq6Nmg==
dependencies:
"@langchain/classic" "1.0.0"
"@langchain/openai" "1.0.0"
"@langchain/classic" "1.0.3"
"@langchain/openai" "1.1.1"
binary-extensions "^2.2.0"
expr-eval "^2.0.2"
flat "^5.0.2"
js-yaml "^4.1.0"
math-expression-evaluator "^2.0.0"
uuid "^10.0.0"
zod "^3.25.76 || ^4"
"@langchain/core@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.1.tgz#c2bdbdff87649fe17b2c86bf535d749ac73a586c"
integrity sha512-hVM3EkojYOk4ISJQKjLuWYSH6kyyOFlZIrLFETDA1L0Z2/Iu0q32aJawZ0FDn6rlXE8QZjBt/9OaOL36rXc05w==
"@langchain/core@^1.0.5":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.5.tgz#1e20ecce80fa4d0b979ea05b24b879b8357d8092"
integrity sha512-9Hy/b9+j+mm0Bhnm8xD9B0KpBYTidroLrDHdbrHoMC2DqXoY2umvi1M3M/9D744qsMSaIMP0ZwFcy5YbqI/dGw==
dependencies:
"@cfworker/json-schema" "^4.0.2"
ansi-styles "^5.0.0"
@@ -802,18 +802,18 @@
uuid "^10.0.0"
zod "^3.25.76 || ^4"
"@langchain/google-genai@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.0.tgz#2785fa163788cb6214dffc1dc29fcd5bbb751493"
integrity sha512-ICUBZl/46nG6+Yhe5v7kp/2TQBGOzqEkpfKPLDeNyJ4x9OOL46xsW3ZZrHJjhGMQuR6/JMmQMTU9kLoYgsd1Tg==
"@langchain/google-genai@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.1.tgz#3601645f652f24e3beb55acc61878070b49c24ed"
integrity sha512-a9Bzaswp1P+eA2V8hAWSBypqjxmH+/zhOY1TBdalQuPQBTRH35jBMVgX3CTTAheAzBUGQtlDD4/dR9tyemDbhw==
dependencies:
"@google/generative-ai" "^0.24.0"
uuid "^11.1.0"
"@langchain/groq@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.0.tgz#413b02158761ff406238467325cd4f9fe0990f3a"
integrity sha512-6fG9MEQHNXnxgObFHSPh+BPYyTGcoDnKd+GhI9l96cpHh+QNI+IvypicRCZVSsLdqzRCFHISvBQaH+SP5vgjIw==
"@langchain/groq@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.1.tgz#7ec8822cd2f29eef4ae0f9c20f67268d1924ab96"
integrity sha512-vDQzv6A3mjG0/W/7vL4Iq+dnmhSbMHln+b7Rna810trjZzfNPZhAP6omqZyzCKIqjsQYUH4ODLnSUCNiarfYsQ==
dependencies:
groq-sdk "^0.19.0"
@@ -842,30 +842,30 @@
"@langchain/langgraph-sdk" "~1.0.0"
uuid "^10.0.0"
"@langchain/ollama@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.0.tgz#803c353e9dfb1a9e7b20f1460a6a201fec29bb77"
integrity sha512-zqn6i7haMjvZW4FQWo0GrF4wYL5mLurdL0qoe+moYWYSCGaay4K7e/4dqM5C/MR16/HPFDzFbBRMkni2PDRBgA==
"@langchain/langgraph@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-1.0.1.tgz#d0be714653e8a27665f86ea795c5c34189455406"
integrity sha512-7y8OTDLrHrpJ55Y5x7c7zU2BbqNllXwxM106Xrd+NaQB5CpEb4hbUfIwe4XmhhscKPwvhXAq3tjeUxw9MCiurQ==
dependencies:
"@langchain/langgraph-checkpoint" "^1.0.0"
"@langchain/langgraph-sdk" "~1.0.0"
uuid "^10.0.0"
"@langchain/ollama@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.1.tgz#c63ac6db65110beef4020a5e2b167ad0bc678d33"
integrity sha512-Pe32hhTpMvnRlNFJxkdu6r1QzsONGz5uvoLiMU1TpgAUu7EyKr2osymlgjBLqDe2vMKUmqHb+yWRH0IppDBUOg==
dependencies:
ollama "^0.5.12"
uuid "^10.0.0"
"@langchain/openai@1.0.0", "@langchain/openai@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0.tgz#03b15312286b30ce0149f6052620c6c95b4387bc"
integrity sha512-olKEUIjb3HBOiD/NR056iGJz4wiN6HhQ/u65YmGWYadWWoKOcGwheBw/FE0x6SH4zDlI3QmP+vMhuQoaww19BQ==
"@langchain/openai@1.1.1", "@langchain/openai@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.1.1.tgz#67ddcf54ee7ac402f6b75b4b9e25447e78c56a93"
integrity sha512-0kUaXejo/sn6QAohWHDaAUapC4CJRkJIajGaWfJC+llSqpDBnmBE1oHg1M2fi1OCeP+ns9SxB6BTsq4Qbiqmig==
dependencies:
js-tiktoken "^1.0.12"
openai "^6.3.0"
zod "^3.25.76 || ^4"
"@langchain/openai@1.0.0-alpha.3":
version "1.0.0-alpha.3"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0-alpha.3.tgz#35c4e770e3421b75a226087af54fbeff147e201a"
integrity sha512-re2NXLYeLatPzoB6YRoFgB1fW6i5ygcLGa7PlNOhi3f93uU1vSlWMgjkO9dcN9ALmr/bhoruqJEn7U0Eva+6/w==
dependencies:
js-tiktoken "^1.0.12"
openai "^6.3.0"
openai "^6.9.0"
zod "^3.25.76 || ^4"
"@langchain/textsplitters@1.0.0", "@langchain/textsplitters@^1.0.0":
@@ -2607,11 +2607,6 @@ expand-template@^2.0.3:
resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
expr-eval@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201"
integrity sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg==
fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
version "3.1.3"
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
@@ -3514,17 +3509,16 @@ kuler@^2.0.0:
resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==
langchain@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.1.tgz#fb181176f4aa443ef02e9e5b563bcb4e170dfeb6"
integrity sha512-IT4JBVbKBh2AjaUFT9OsmOfeK3UbKy3SgdzZOuvet25sAaMpAR8IaM9XVddRs+OXQqVg6sOS01KUUVCJksVhHg==
langchain@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.4.tgz#c4fa22d927f41d56c356ecfccea5c08ae7b682ef"
integrity sha512-g7z2kKvnXOecybbVGHfI2ZmdmP309mxC1FYlq6WC/7RsKgX5MwY9gBjwK16mpKOaozOD9QCo1Ia7o2UcUBRb9Q==
dependencies:
"@langchain/langgraph" "^1.0.0"
"@langchain/langgraph-checkpoint" "^1.0.0"
langsmith "~0.3.74"
uuid "^10.0.0"
zod "^3.25.76 || ^4"
optionalDependencies:
langsmith "^0.3.64"
langsmith@^0.3.64:
version "0.3.74"
@@ -3539,6 +3533,19 @@ langsmith@^0.3.64:
semver "^7.6.3"
uuid "^10.0.0"
langsmith@~0.3.74:
version "0.3.79"
resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.3.79.tgz#6c845644da26e7fdd8e9b80706091669fc43bda4"
integrity sha512-j5uiAsyy90zxlxaMuGjb7EdcL51Yx61SpKfDOI1nMPBbemGju+lf47he4e59Hp5K63CY8XWgFP42WeZ+zuIU4Q==
dependencies:
"@types/uuid" "^10.0.0"
chalk "^4.1.2"
console-table-printer "^2.12.1"
p-queue "^6.6.2"
p-retry "4"
semver "^7.6.3"
uuid "^10.0.0"
language-subtag-registry@^0.3.20:
version "0.3.22"
resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d"
@@ -3686,6 +3693,11 @@ matcher@^3.0.0:
dependencies:
escape-string-regexp "^4.0.0"
math-expression-evaluator@^2.0.0:
version "2.0.7"
resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-2.0.7.tgz#dc99a80ce2bf7f9b7df878126feb5c506c1fdf5f"
integrity sha512-uwliJZ6BPHRq4eiqNWxZBDzKUiS5RIynFFcgchqhBOloVLVBpZpNG8jRYkedLcBvhph8TnRyWEuxPqiQcwIdog==
math-intrinsics@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9"
@@ -4025,10 +4037,10 @@ onnxruntime-web@1.22.0-dev.20250409-89f8206ba4:
platform "^1.3.6"
protobufjs "^7.2.4"
openai@^6.3.0:
version "6.5.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-6.5.0.tgz#7dd9c4c0ca6e394c1d1e738b2000e084024685b2"
integrity sha512-bNqJ15Ijbs41KuJ2iYz/mGAruFHzQQt7zXo4EvjNLoB64aJdgn1jlMeDTsXjEg+idVYafg57QB/5Rd16oqvZ6A==
openai@^6.9.0:
version "6.9.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-6.9.0.tgz#acd15b2233c42b165981f3de8f4cfce27f844fce"
integrity sha512-n2sJRYmM+xfJ0l3OfH8eNnIyv3nQY7L08gZQu3dw6wSdfPtKAk92L83M2NIP5SS8Cl/bsBBG3yKzEOjkx0O+7A==
openapi-types@^12.1.3:
version "12.1.3"
@@ -5491,12 +5503,7 @@ yocto-queue@^0.1.0:
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
zod@^3.22.4:
version "3.22.4"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"
integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==
"zod@^3.25.76 || ^4":
"zod@^3.25.76 || ^4", zod@^4.1.12:
version "4.1.12"
resolved "https://registry.yarnpkg.com/zod/-/zod-4.1.12.tgz#64f1ea53d00eab91853195653b5af9eee68970f0"
integrity sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==