Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-22 21:18:15 +00:00)

Comparing canary...f44ad973aa (15 commits).
Commits:

- f44ad973aa
- 4bcbdad6cb
- 5272c7fd3e
- 657a577ec8
- f6dac43d7a
- a00f2231d4
- 1da9b7655c
- bd5628b390
- 3d5d04eda0
- 07a17925b1
- 3bcf646af1
- e499c0b96e
- 33b736e1e8
- 41fe009847
- 1a8889c71c
package.json:

```diff
@@ -16,13 +16,14 @@
     "@huggingface/transformers": "^3.7.5",
     "@iarna/toml": "^2.2.5",
     "@icons-pack/react-simple-icons": "^12.3.0",
-    "@langchain/anthropic": "^1.0.0",
-    "@langchain/community": "^1.0.0",
-    "@langchain/core": "^1.0.1",
-    "@langchain/google-genai": "^1.0.0",
-    "@langchain/groq": "^1.0.0",
-    "@langchain/ollama": "^1.0.0",
-    "@langchain/openai": "^1.0.0",
+    "@langchain/anthropic": "^1.0.1",
+    "@langchain/community": "^1.0.3",
+    "@langchain/core": "^1.0.5",
+    "@langchain/google-genai": "^1.0.1",
+    "@langchain/groq": "^1.0.1",
+    "@langchain/langgraph": "^1.0.1",
+    "@langchain/ollama": "^1.0.1",
+    "@langchain/openai": "^1.1.1",
     "@langchain/textsplitters": "^1.0.0",
     "@tailwindcss/typography": "^0.5.12",
     "axios": "^1.8.3",
@@ -33,7 +34,7 @@
     "framer-motion": "^12.23.24",
     "html-to-text": "^9.0.5",
     "jspdf": "^3.0.1",
-    "langchain": "^1.0.1",
+    "langchain": "^1.0.4",
     "lucide-react": "^0.363.0",
     "mammoth": "^1.9.1",
     "markdown-to-jsx": "^7.7.2",
@@ -48,7 +49,7 @@
     "tailwind-merge": "^2.2.2",
     "winston": "^3.17.0",
     "yet-another-react-lightbox": "^3.17.2",
-    "zod": "^3.22.4"
+    "zod": "^4.1.12"
   },
   "devDependencies": {
     "@types/better-sqlite3": "^7.6.12",
```
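Two bumps here are more than routine maintenance: zod jumps a major version (3 to 4), and @langchain/langgraph is a new dependency. The zod bump matters for the provider code later in this compare, which calls z.toJSONSchema, an API that only exists in zod 4. A minimal sketch (the schema is illustrative):

```ts
import { z } from 'zod';

// zod 4 emits JSON Schema natively; zod 3 needed a helper package for this.
const Answer = z.object({
  summary: z.string(),
  sources: z.array(z.string()),
});

// Plain JSON Schema object, usable as Ollama's structured-output `format`.
console.log(z.toJSONSchema(Answer));
```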
Image search API route:

```diff
@@ -1,4 +1,4 @@
-import handleImageSearch from '@/lib/chains/imageSearchAgent';
+import searchImages from '@/lib/agents/media/image';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
@@ -13,6 +13,13 @@ export const POST = async (req: Request) => {
   try {
     const body: ImageSearchBody = await req.json();

+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +30,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];

-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
-    const images = await handleImageSearch(
+    const images = await searchImages(
       {
-        chat_history: chatHistory,
+        chatHistory: chatHistory,
         query: body.query,
       },
       llm,
```
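The route now loads the model up front and passes a camelCase chatHistory key to the new agent helper. Pieced together from the hunks above, a request body looks roughly like this; the '/api/images' path and the response handling are assumptions, only the field names come from the diff:

```ts
// Hypothetical client call against the image search route.
const res = await fetch('/api/images', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'Show me photos of the aurora borealis',
    chatHistory: [{ role: 'user', content: 'Tell me about the aurora' }],
    chatModel: { providerId: 'openai', key: 'gpt-4o-mini' }, // ids are placeholders
  }),
});
const data = await res.json();
```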
Chat API route:

```diff
@@ -30,12 +30,6 @@ export const POST = async (req: Request) => {
   body.optimizationMode = body.optimizationMode || 'balanced';
   body.stream = body.stream || false;

-  const history: BaseMessage[] = body.history.map((msg) => {
-    return msg[0] === 'human'
-      ? new HumanMessage({ content: msg[1] })
-      : new AIMessage({ content: msg[1] });
-  });
-
   const registry = new ModelRegistry();

   const [llm, embeddings] = await Promise.all([
@@ -46,6 +40,12 @@ export const POST = async (req: Request) => {
     ),
   ]);

+  const history: BaseMessage[] = body.history.map((msg) => {
+    return msg[0] === 'human'
+      ? new HumanMessage({ content: msg[1] })
+      : new AIMessage({ content: msg[1] });
+  });
+
   const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode];

   if (!searchHandler) {
@@ -128,7 +128,7 @@ export const POST = async (req: Request) => {

       try {
         controller.close();
-      } catch (error) {}
+      } catch (error) { }
     });

     emitter.on('data', (data: string) => {
```
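The history mapping is unchanged, only moved below the model loading, and it implies that body.history arrives as [tag, content] tuples: 'human' becomes a HumanMessage, anything else an AIMessage. A self-contained sketch (the 'ai' tag is an assumption; the code only tests for 'human'):

```ts
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';

const rawHistory: Array<[string, string]> = [
  ['human', 'What is Perplexica?'],
  ['ai', 'Perplexica is an open-source AI-powered search engine.'], // tag assumed
];

const history: BaseMessage[] = rawHistory.map((msg) =>
  msg[0] === 'human'
    ? new HumanMessage({ content: msg[1] })
    : new AIMessage({ content: msg[1] }),
);
```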
Suggestion generation API route:

```diff
@@ -1,7 +1,6 @@
-import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
+import generateSuggestions from '@/lib/agents/suggestions';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';

 interface SuggestionsGenerationBody {
@@ -13,6 +12,13 @@ export const POST = async (req: Request) => {
   try {
     const body: SuggestionsGenerationBody = await req.json();

+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +29,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];

-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
     const suggestions = await generateSuggestions(
       {
-        chat_history: chatHistory,
+        chatHistory,
       },
       llm,
     );
```
Video search API route:

```diff
@@ -1,4 +1,4 @@
-import handleVideoSearch from '@/lib/chains/videoSearchAgent';
+import handleVideoSearch from '@/lib/agents/media/video';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
@@ -13,6 +13,13 @@ export const POST = async (req: Request) => {
   try {
     const body: VideoSearchBody = await req.json();

+    const registry = new ModelRegistry();
+
+    const llm = await registry.loadChatModel(
+      body.chatModel.providerId,
+      body.chatModel.key,
+    );
+
     const chatHistory = body.chatHistory
       .map((msg: any) => {
         if (msg.role === 'user') {
@@ -23,16 +30,9 @@ export const POST = async (req: Request) => {
       })
       .filter((msg) => msg !== undefined) as BaseMessage[];

-    const registry = new ModelRegistry();
-
-    const llm = await registry.loadChatModel(
-      body.chatModel.providerId,
-      body.chatModel.key,
-    );
-
     const videos = await handleVideoSearch(
       {
-        chat_history: chatHistory,
+        chatHistory: chatHistory,
         query: body.query,
       },
       llm,
```
ChatWindow component:

```diff
@@ -8,7 +8,6 @@ import { Settings } from 'lucide-react';
 import Link from 'next/link';
 import NextError from 'next/error';
 import { useChat } from '@/lib/hooks/useChat';
-import Loader from './ui/Loader';
 import SettingsButtonMobile from './Settings/SettingsButtonMobile';

 export interface BaseMessage {
@@ -52,7 +51,7 @@ export interface File {
 }

 const ChatWindow = () => {
-  const { hasError, isReady, notFound, messages } = useChat();
+  const { hasError, notFound, messages } = useChat();
   if (hasError) {
     return (
       <div className="relative">
@@ -68,8 +67,7 @@ const ChatWindow = () => {
     );
   }

-  return isReady ? (
-    notFound ? (
+  return notFound ? (
     <NextError statusCode={404} />
   ) : (
     <div>
@@ -82,11 +80,6 @@ const ChatWindow = () => {
         <EmptyChat />
       )}
     </div>
-    )
-  ) : (
-    <div className="flex flex-row items-center justify-center min-h-screen">
-      <Loader />
-    </div>
   );
 };

```
Copy message action:

```diff
@@ -20,9 +20,9 @@ const Copy = ({
         setCopied(true);
         setTimeout(() => setCopied(false), 1000);
       }}
-      className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
+      className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
     >
-      {copied ? <Check size={18} /> : <ClipboardList size={18} />}
+      {copied ? <Check size={16} /> : <ClipboardList size={16} />}
     </button>
   );
 };
```
Rewrite message action:

```diff
@@ -1,4 +1,4 @@
-import { ArrowLeftRight } from 'lucide-react';
+import { ArrowLeftRight, Repeat } from 'lucide-react';

 const Rewrite = ({
   rewrite,
@@ -10,12 +10,11 @@ const Rewrite = ({
   return (
     <button
       onClick={() => rewrite(messageId)}
-      className="py-2 px-3 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1"
+      className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white flex flex-row items-center space-x-1"
     >
-      <ArrowLeftRight size={18} />
-      <p className="text-xs font-medium">Rewrite</p>
+      <Repeat size={16} />
     </button>
   );
 };
-
+1;
 export default Rewrite;
```
MessageBox component:

```diff
@@ -10,6 +10,7 @@ import {
   StopCircle,
   Layers3,
   Plus,
+  CornerDownRight,
 } from 'lucide-react';
 import Markdown, { MarkdownToJSX } from 'markdown-to-jsx';
 import Copy from './MessageActions/Copy';
@@ -122,14 +123,14 @@ const MessageBox = ({
             </Markdown>

             {loading && isLast ? null : (
-              <div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4 -mx-2">
-                <div className="flex flex-row items-center space-x-1">
+              <div className="flex flex-row items-center justify-between w-full text-black dark:text-white py-4">
+                <div className="flex flex-row items-center -ml-2">
                   <Rewrite
                     rewrite={rewrite}
                     messageId={section.assistantMessage.messageId}
                   />
                 </div>
-                <div className="flex flex-row items-center space-x-1">
+                <div className="flex flex-row items-center -mr-2">
                   <Copy
                     initialMessage={section.assistantMessage.content}
                     section={section}
@@ -142,12 +143,12 @@ const MessageBox = ({
                         start();
                       }
                     }}
-                    className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
+                    className="p-2 text-black/70 dark:text-white/70 rounded-full hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
                   >
                     {speechStatus === 'started' ? (
-                      <StopCircle size={18} />
+                      <StopCircle size={16} />
                     ) : (
-                      <Volume2 size={18} />
+                      <Volume2 size={16} />
                     )}
                   </button>
                 </div>
@@ -159,7 +160,7 @@ const MessageBox = ({
             section.suggestions.length > 0 &&
             section.assistantMessage &&
             !loading && (
-              <div className="mt-8 pt-6 border-t border-light-200/50 dark:border-dark-200/50">
+              <div className="mt-6">
                 <div className="flex flex-row items-center space-x-2 mb-4">
                   <Layers3
                     className="text-black dark:text-white"
@@ -173,20 +174,24 @@ const MessageBox = ({
                   {section.suggestions.map(
                     (suggestion: string, i: number) => (
                       <div key={i}>
-                        {i > 0 && (
-                          <div className="h-px bg-light-200/40 dark:bg-dark-200/40 mx-3" />
-                        )}
+                        <div className="h-px bg-light-200/40 dark:bg-dark-200/40" />
                         <button
                           onClick={() => sendMessage(suggestion)}
-                          className="group w-full px-3 py-4 text-left transition-colors duration-200"
+                          className="group w-full py-4 text-left transition-colors duration-200"
                         >
                           <div className="flex items-center justify-between gap-3">
-                            <p className="text-sm text-black/70 dark:text-white/70 group-hover:text-[#24A0ED] transition-colors duration-200 leading-relaxed">
+                            <div className="flex flex-row space-x-3 items-center ">
+                              <CornerDownRight
+                                size={17}
+                                className="group-hover:text-sky-400 transition-colors duration-200"
+                              />
+                              <p className="text-sm text-black/70 dark:text-white/70 group-hover:text-sky-400 transition-colors duration-200 leading-relaxed">
                               {suggestion}
                             </p>
+                            </div>
                             <Plus
                               size={16}
-                              className="text-black/40 dark:text-white/40 group-hover:text-[#24A0ED] transition-colors duration-200 flex-shrink-0"
+                              className="text-black/40 dark:text-white/40 group-hover:text-sky-400 transition-colors duration-200 flex-shrink-0"
                             />
                           </div>
                         </button>
@@ -205,11 +210,11 @@ const MessageBox = ({
           <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
             <SearchImages
               query={section.userMessage.content}
-              chatHistory={chatTurns.slice(0, sectionIndex * 2)}
+              chatHistory={chatTurns}
               messageId={section.assistantMessage.messageId}
             />
             <SearchVideos
-              chatHistory={chatTurns.slice(0, sectionIndex * 2)}
+              chatHistory={chatTurns}
               query={section.userMessage.content}
               messageId={section.assistantMessage.messageId}
             />
```
src/lib/agents/media/image.ts (new file, 65 lines):

```ts
/* I don't think these can be classified as agents, but I'll keep them here to keep the structure consistent. */

import {
  RunnableSequence,
  RunnableMap,
  RunnableLambda,
} from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '@/lib/searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '@/lib/outputParsers/lineOutputParser';
import { imageSearchFewShots, imageSearchPrompt } from '@/lib/prompts/media/image';

type ImageSearchChainInput = {
  chatHistory: BaseMessage[];
  query: string;
};

type ImageSearchResult = {
  img_src: string;
  url: string;
  title: string;
};

const outputParser = new LineOutputParser({
  key: 'query',
});

const searchImages = async (
  input: ImageSearchChainInput,
  llm: BaseChatModel,
) => {
  const chatPrompt = await ChatPromptTemplate.fromMessages([
    new SystemMessage(imageSearchPrompt),
    ...imageSearchFewShots,
    new HumanMessage(`<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`),
  ]).formatMessages({});

  const res = await llm.invoke(chatPrompt);

  const query = await outputParser.invoke(res);

  const searchRes = await searchSearxng(query!, {
    engines: ['bing images', 'google images'],
  });

  const images: ImageSearchResult[] = [];

  searchRes.results.forEach((result) => {
    if (result.img_src && result.url && result.title) {
      images.push({
        img_src: result.img_src,
        url: result.url,
        title: result.title,
      });
    }
  });

  return images.slice(0, 10);
};

export default searchImages;
```
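A sketch of calling the new helper directly; the model name is a placeholder, and note that later in this compare the registry's loadChatModel starts returning the project's own BaseLLM wrapper rather than a LangChain BaseChatModel, so how the llm argument is obtained depends on the commit:

```ts
import { ChatOpenAI } from '@langchain/openai';
import searchImages from '@/lib/agents/media/image';

const llm = new ChatOpenAI({ model: 'gpt-4o-mini' }); // placeholder model

const images = await searchImages(
  { chatHistory: [], query: 'northern lights over Iceland' },
  llm,
);
// images: up to 10 { img_src, url, title } entries from SearXNG
```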
src/lib/agents/media/video.ts (new file, 65 lines):

```ts
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
import { searchSearxng } from '@/lib/searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '@/lib/outputParsers/lineOutputParser';
import { videoSearchFewShots, videoSearchPrompt } from '@/lib/prompts/media/videos';

type VideoSearchChainInput = {
  chatHistory: BaseMessage[];
  query: string;
};

type VideoSearchResult = {
  img_src: string;
  url: string;
  title: string;
  iframe_src: string;
};

const outputParser = new LineOutputParser({
  key: 'query',
});

const searchVideos = async (
  input: VideoSearchChainInput,
  llm: BaseChatModel,
) => {
  const chatPrompt = await ChatPromptTemplate.fromMessages([
    new SystemMessage(videoSearchPrompt),
    ...videoSearchFewShots,
    new HumanMessage(`<conversation>${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`),
  ]).formatMessages({});

  const res = await llm.invoke(chatPrompt);

  const query = await outputParser.invoke(res);

  const searchRes = await searchSearxng(query!, {
    engines: ['youtube'],
  });

  const videos: VideoSearchResult[] = [];

  searchRes.results.forEach((result) => {
    if (
      result.thumbnail &&
      result.url &&
      result.title &&
      result.iframe_src
    ) {
      videos.push({
        img_src: result.thumbnail,
        url: result.url,
        title: result.title,
        iframe_src: result.iframe_src,
      });
    }
  });

  return videos.slice(0, 10);
};

export default searchVideos;
```
src/lib/agents/suggestions/index.ts (new file, 32 lines):

```ts
import ListLineOutputParser from '@/lib/outputParsers/listLineOutputParser';
import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '@/lib/utils/formatHistory';
import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { suggestionGeneratorPrompt } from '@/lib/prompts/suggestions';

type SuggestionGeneratorInput = {
  chatHistory: BaseMessage[];
};

const outputParser = new ListLineOutputParser({
  key: 'suggestions',
});

const generateSuggestions = async (
  input: SuggestionGeneratorInput,
  llm: BaseChatModel,
) => {
  const chatPrompt = await ChatPromptTemplate.fromMessages([
    new SystemMessage(suggestionGeneratorPrompt),
    new HumanMessage(`<conversation>${formatChatHistoryAsString(input.chatHistory)}</conversation>`),
  ]).formatMessages({});

  const res = await llm.invoke(chatPrompt);

  const suggestions = await outputParser.invoke(res);

  return suggestions;
};

export default generateSuggestions;
```
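Direct usage follows the same pattern as the media helpers (model name is a placeholder):

```ts
import { ChatOpenAI } from '@langchain/openai';
import { HumanMessage } from '@langchain/core/messages';
import generateSuggestions from '@/lib/agents/suggestions';

const llm = new ChatOpenAI({ model: 'gpt-4o-mini' }); // placeholder model

const suggestions = await generateSuggestions(
  { chatHistory: [new HumanMessage('Tell me about SpaceX')] },
  llm,
);
// suggestions: the string list parsed from the <suggestions> tag
```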
Deleted: src/lib/chains/imageSearchAgent.ts (105 lines):

```ts
import {
  RunnableSequence,
  RunnableMap,
  RunnableLambda,
} from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';

const imageSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;

type ImageSearchChainInput = {
  chat_history: BaseMessage[];
  query: string;
};

interface ImageSearchResult {
  img_src: string;
  url: string;
  title: string;
}

const strParser = new StringOutputParser();

const createImageSearchChain = (llm: BaseChatModel) => {
  return RunnableSequence.from([
    RunnableMap.from({
      chat_history: (input: ImageSearchChainInput) => {
        return formatChatHistoryAsString(input.chat_history);
      },
      query: (input: ImageSearchChainInput) => {
        return input.query;
      },
    }),
    ChatPromptTemplate.fromMessages([
      ['system', imageSearchChainPrompt],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
      ],
      ['assistant', '<query>A cat</query>'],

      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
      ],
      ['assistant', '<query>Car working</query>'],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
      ],
      ['assistant', '<query>AC working</query>'],
      [
        'user',
        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
      ],
    ]),
    llm,
    strParser,
    RunnableLambda.from(async (input: string) => {
      const queryParser = new LineOutputParser({
        key: 'query',
      });

      return await queryParser.parse(input);
    }),
    RunnableLambda.from(async (input: string) => {
      const res = await searchSearxng(input, {
        engines: ['bing images', 'google images'],
      });

      const images: ImageSearchResult[] = [];

      res.results.forEach((result) => {
        if (result.img_src && result.url && result.title) {
          images.push({
            img_src: result.img_src,
            url: result.url,
            title: result.title,
          });
        }
      });

      return images.slice(0, 10);
    }),
  ]);
};

const handleImageSearch = (
  input: ImageSearchChainInput,
  llm: BaseChatModel,
) => {
  const imageSearchChain = createImageSearchChain(llm);
  return imageSearchChain.invoke(input);
};

export default handleImageSearch;
```
Deleted: src/lib/chains/suggestionGeneratorAgent.ts (55 lines):

```ts
import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
import ListLineOutputParser from '../outputParsers/listLineOutputParser';
import { PromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';

const suggestionGeneratorPrompt = `
You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
Make sure the suggestions are medium in length and are informative and relevant to the conversation.

Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:

<suggestions>
Tell me more about SpaceX and their recent projects
What is the latest news on SpaceX?
Who is the CEO of SpaceX?
</suggestions>

Conversation:
{chat_history}
`;

type SuggestionGeneratorInput = {
  chat_history: BaseMessage[];
};

const outputParser = new ListLineOutputParser({
  key: 'suggestions',
});

const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
  return RunnableSequence.from([
    RunnableMap.from({
      chat_history: (input: SuggestionGeneratorInput) =>
        formatChatHistoryAsString(input.chat_history),
    }),
    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
    llm,
    outputParser,
  ]);
};

const generateSuggestions = (
  input: SuggestionGeneratorInput,
  llm: BaseChatModel,
) => {
  (llm as unknown as ChatOpenAI).temperature = 0;
  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
  return suggestionGeneratorChain.invoke(input);
};

export default generateSuggestions;
```
Deleted: src/lib/chains/videoSearchAgent.ts (110 lines):

```ts
import {
  RunnableSequence,
  RunnableMap,
  RunnableLambda,
} from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { searchSearxng } from '../searxng';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import LineOutputParser from '../outputParsers/lineOutputParser';

const videoSearchChainPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;

type VideoSearchChainInput = {
  chat_history: BaseMessage[];
  query: string;
};

interface VideoSearchResult {
  img_src: string;
  url: string;
  title: string;
  iframe_src: string;
}

const strParser = new StringOutputParser();

const createVideoSearchChain = (llm: BaseChatModel) => {
  return RunnableSequence.from([
    RunnableMap.from({
      chat_history: (input: VideoSearchChainInput) => {
        return formatChatHistoryAsString(input.chat_history);
      },
      query: (input: VideoSearchChainInput) => {
        return input.query;
      },
    }),
    ChatPromptTemplate.fromMessages([
      ['system', videoSearchChainPrompt],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
      ],
      ['assistant', '<query>How does a car work?</query>'],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
      ],
      ['assistant', '<query>Theory of relativity</query>'],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
      ],
      ['assistant', '<query>AC working</query>'],
      [
        'user',
        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
      ],
    ]),
    llm,
    strParser,
    RunnableLambda.from(async (input: string) => {
      const queryParser = new LineOutputParser({
        key: 'query',
      });
      return await queryParser.parse(input);
    }),
    RunnableLambda.from(async (input: string) => {
      const res = await searchSearxng(input, {
        engines: ['youtube'],
      });

      const videos: VideoSearchResult[] = [];

      res.results.forEach((result) => {
        if (
          result.thumbnail &&
          result.url &&
          result.title &&
          result.iframe_src
        ) {
          videos.push({
            img_src: result.thumbnail,
            url: result.url,
            title: result.title,
            iframe_src: result.iframe_src,
          });
        }
      });

      return videos.slice(0, 10);
    }),
  ]);
};

const handleVideoSearch = (
  input: VideoSearchChainInput,
  llm: BaseChatModel,
) => {
  const videoSearchChain = createVideoSearchChain(llm);
  return videoSearchChain.invoke(input);
};

export default handleVideoSearch;
```
src/lib/models/base/embedding.ts (new file, 7 lines):

```ts
abstract class BaseEmbedding<CONFIG> {
  constructor(protected config: CONFIG) {}
  abstract embedText(texts: string[]): Promise<number[][]>;
  abstract embedChunks(chunks: Chunk[]): Promise<number[][]>;
}

export default BaseEmbedding;
```
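Chunk is used here without an import, so it is presumably declared as a global (ambient) type elsewhere in the repo. Judging from chunks.map((c) => c.content) in the provider implementations below, a minimal compatible shape would be:

```ts
// Assumed shape, inferred from usage; the real type may carry more fields
// (metadata, ids, etc.).
type Chunk = {
  content: string;
};
```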
src/lib/models/base/llm.ts (new file, 26 lines):

```ts
import {
  GenerateObjectInput,
  GenerateObjectOutput,
  GenerateOptions,
  GenerateTextInput,
  GenerateTextOutput,
  StreamObjectOutput,
  StreamTextOutput,
} from '../types';

abstract class BaseLLM<CONFIG> {
  constructor(protected config: CONFIG) {}
  abstract withOptions(options: GenerateOptions): this;
  abstract generateText(input: GenerateTextInput): Promise<GenerateTextOutput>;
  abstract streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput>;
  abstract generateObject<T>(
    input: GenerateObjectInput,
  ): Promise<GenerateObjectOutput<T>>;
  abstract streamObject<T>(
    input: GenerateObjectInput,
  ): AsyncGenerator<StreamObjectOutput<T>>;
}

export default BaseLLM;
```
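Judging from these signatures and the Ollama/OpenAI implementations below, a subclass is consumed like this (the message shape is taken from those implementations, not from this file):

```ts
import BaseLLM from '@/lib/models/base/llm';

// Sketch: drain a BaseLLM text stream to stdout.
async function printAnswer(llm: BaseLLM<any>) {
  for await (const chunk of llm.streamText({
    messages: [{ role: 'user', content: 'Hello!' }],
  })) {
    process.stdout.write(chunk.contentChunk);
  }
}
```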
src/lib/models/base/provider.ts:

```diff
@@ -1,7 +1,9 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
+import { ModelList, ProviderMetadata } from '../types';
 import { UIConfigField } from '@/lib/config/types';
+import BaseLLM from './llm';
+import BaseEmbedding from './embedding';

 abstract class BaseModelProvider<CONFIG> {
   constructor(
@@ -11,8 +13,8 @@ abstract class BaseModelProvider<CONFIG> {
   ) {}
   abstract getDefaultModels(): Promise<ModelList>;
   abstract getModelList(): Promise<ModelList>;
-  abstract loadChatModel(modelName: string): Promise<BaseChatModel>;
-  abstract loadEmbeddingModel(modelName: string): Promise<Embeddings>;
+  abstract loadChatModel(modelName: string): Promise<BaseLLM<any>>;
+  abstract loadEmbeddingModel(modelName: string): Promise<BaseEmbedding<any>>;
   static getProviderConfigFields(): UIConfigField[] {
     throw new Error('Method not implemented.');
   }
```
src/lib/models/providers/index.ts:

```diff
@@ -1,27 +1,11 @@
 import { ModelProviderUISection } from '@/lib/config/types';
-import { ProviderConstructor } from './baseProvider';
+import { ProviderConstructor } from '../base/provider';
 import OpenAIProvider from './openai';
 import OllamaProvider from './ollama';
-import TransformersProvider from './transformers';
-import AnthropicProvider from './anthropic';
-import GeminiProvider from './gemini';
-import GroqProvider from './groq';
-import DeepSeekProvider from './deepseek';
-import LMStudioProvider from './lmstudio';
-import LemonadeProvider from './lemonade';
-import AimlProvider from '@/lib/models/providers/aiml';

 export const providers: Record<string, ProviderConstructor<any>> = {
   openai: OpenAIProvider,
   ollama: OllamaProvider,
-  transformers: TransformersProvider,
-  anthropic: AnthropicProvider,
-  gemini: GeminiProvider,
-  groq: GroqProvider,
-  deepseek: DeepSeekProvider,
-  aiml: AimlProvider,
-  lmstudio: LMStudioProvider,
-  lemonade: LemonadeProvider,
 };

 export const getModelProvidersUIConfigSection =
```
Ollama provider:

```diff
@@ -1,10 +1,11 @@
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOllama, OllamaEmbeddings } from '@langchain/ollama';
-import { Embeddings } from '@langchain/core/embeddings';
 import { UIConfigField } from '@/lib/config/types';
 import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
+import BaseModelProvider from '../../base/provider';
+import { Model, ModelList, ProviderMetadata } from '../../types';
+import BaseLLM from '../../base/llm';
+import BaseEmbedding from '../../base/embedding';
+import OllamaLLM from './ollamaLLM';
+import OllamaEmbedding from './ollamaEmbedding';

 interface OllamaConfig {
   baseURL: string;
@@ -76,7 +77,7 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
     };
   }

-  async loadChatModel(key: string): Promise<BaseChatModel> {
+  async loadChatModel(key: string): Promise<BaseLLM<any>> {
     const modelList = await this.getModelList();

     const exists = modelList.chat.find((m) => m.key === key);
@@ -87,14 +88,13 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
       );
     }

-    return new ChatOllama({
-      temperature: 0.7,
+    return new OllamaLLM({
+      baseURL: this.config.baseURL,
       model: key,
-      baseUrl: this.config.baseURL,
     });
   }

-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
+  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
     const modelList = await this.getModelList();
     const exists = modelList.embedding.find((m) => m.key === key);

@@ -104,9 +104,9 @@ class OllamaProvider extends BaseModelProvider<OllamaConfig> {
       );
     }

-    return new OllamaEmbeddings({
+    return new OllamaEmbedding({
       model: key,
-      baseUrl: this.config.baseURL,
+      baseURL: this.config.baseURL,
     });
   }

```
src/lib/models/providers/ollama/ollamaEmbedding.ts (new file, 39 lines):

```ts
import { Ollama } from 'ollama';
import BaseEmbedding from '../../base/embedding';

type OllamaConfig = {
  model: string;
  baseURL?: string;
};

class OllamaEmbedding extends BaseEmbedding<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);

    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  async embedText(texts: string[]): Promise<number[][]> {
    const response = await this.ollamaClient.embed({
      input: texts,
      model: this.config.model,
    });

    return response.embeddings;
  }

  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    const response = await this.ollamaClient.embed({
      input: chunks.map((c) => c.content),
      model: this.config.model,
    });

    return response.embeddings;
  }
}

export default OllamaEmbedding;
```
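A quick sanity check of the wrapper; the model name is a placeholder and a local Ollama server is assumed:

```ts
import OllamaEmbedding from '@/lib/models/providers/ollama/ollamaEmbedding';

async function demo() {
  const embedder = new OllamaEmbedding({ model: 'nomic-embed-text' }); // placeholder model
  const vectors = await embedder.embedText(['hello', 'world']);
  console.log(vectors.length, vectors[0].length); // 2 vectors, model-dependent width
}
```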
src/lib/models/providers/ollama/ollamaLLM.ts (new file, 149 lines):

```ts
import z from 'zod';
import BaseLLM from '../../base/llm';
import {
  GenerateObjectInput,
  GenerateOptions,
  GenerateTextInput,
  GenerateTextOutput,
  StreamTextOutput,
} from '../../types';
import { Ollama } from 'ollama';
import { parse } from 'partial-json';

type OllamaConfig = {
  baseURL: string;
  model: string;
  options?: GenerateOptions;
};

class OllamaLLM extends BaseLLM<OllamaConfig> {
  ollamaClient: Ollama;

  constructor(protected config: OllamaConfig) {
    super(config);

    this.ollamaClient = new Ollama({
      host: this.config.baseURL || 'http://localhost:11434',
    });
  }

  withOptions(options: GenerateOptions) {
    this.config.options = {
      ...this.config.options,
      ...options,
    };
    return this;
  }

  async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
    this.withOptions(input.options || {});

    const res = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      options: {
        top_p: this.config.options?.topP,
        temperature: this.config.options?.temperature,
        num_predict: this.config.options?.maxTokens,
        frequency_penalty: this.config.options?.frequencyPenalty,
        presence_penalty: this.config.options?.presencePenalty,
        stop: this.config.options?.stopSequences,
      },
    });

    return {
      content: res.message.content,
      additionalInfo: {
        reasoning: res.message.thinking,
      },
    };
  }

  async *streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput> {
    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      stream: true,
      options: {
        top_p: this.config.options?.topP,
        temperature: this.config.options?.temperature,
        num_predict: this.config.options?.maxTokens,
        frequency_penalty: this.config.options?.frequencyPenalty,
        presence_penalty: this.config.options?.presencePenalty,
        stop: this.config.options?.stopSequences,
      },
    });

    for await (const chunk of stream) {
      yield {
        contentChunk: chunk.message.content,
        done: chunk.done,
        additionalInfo: {
          reasoning: chunk.message.thinking,
        },
      };
    }
  }

  async generateObject<T>(input: GenerateObjectInput): Promise<T> {
    this.withOptions(input.options || {});

    const response = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      options: {
        top_p: this.config.options?.topP,
        temperature: this.config.options?.temperature,
        num_predict: this.config.options?.maxTokens,
        frequency_penalty: this.config.options?.frequencyPenalty,
        presence_penalty: this.config.options?.presencePenalty,
        stop: this.config.options?.stopSequences,
      },
    });

    try {
      return input.schema.parse(JSON.parse(response.message.content)) as T;
    } catch (err) {
      throw new Error(`Error parsing response from Ollama: ${err}`);
    }
  }

  async *streamObject<T>(input: GenerateObjectInput): AsyncGenerator<T> {
    let recievedObj: string = '';

    this.withOptions(input.options || {});

    const stream = await this.ollamaClient.chat({
      model: this.config.model,
      messages: input.messages,
      format: z.toJSONSchema(input.schema),
      stream: true,
      options: {
        top_p: this.config.options?.topP,
        temperature: this.config.options?.temperature,
        num_predict: this.config.options?.maxTokens,
        frequency_penalty: this.config.options?.frequencyPenalty,
        presence_penalty: this.config.options?.presencePenalty,
        stop: this.config.options?.stopSequences,
      },
    });

    for await (const chunk of stream) {
      recievedObj += chunk.message.content;

      try {
        yield parse(recievedObj) as T;
      } catch (err) {
        console.log('Error parsing partial object from Ollama:', err);
        yield {} as T;
      }
    }
  }
}

export default OllamaLLM;
```
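One inconsistency worth flagging: generateObject here returns Promise&lt;T&gt;, while BaseLLM declares Promise&lt;GenerateObjectOutput&lt;T&gt;&gt; (streamObject likewise). The sketch below follows the concrete class; the model name is a placeholder and a local Ollama server is assumed:

```ts
import { z } from 'zod';
import OllamaLLM from '@/lib/models/providers/ollama/ollamaLLM';

const Answer = z.object({ summary: z.string(), sources: z.array(z.string()) });

async function demo() {
  const llm = new OllamaLLM({
    baseURL: 'http://localhost:11434',
    model: 'llama3.1', // placeholder
  });

  const obj = await llm.generateObject<z.infer<typeof Answer>>({
    messages: [{ role: 'user', content: 'Summarize the latest SpaceX launch.' }],
    schema: Answer,
  });

  console.log(obj.summary);
}
```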
OpenAI provider:

```diff
@@ -1,10 +1,13 @@
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { Model, ModelList, ProviderMetadata } from '../types';
-import BaseModelProvider from './baseProvider';
-import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import { Embeddings } from '@langchain/core/embeddings';
 import { UIConfigField } from '@/lib/config/types';
 import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
+import { Model, ModelList, ProviderMetadata } from '../../types';
+import OpenAIEmbedding from './openaiEmbedding';
+import BaseEmbedding from '../../base/embedding';
+import BaseModelProvider from '../../base/provider';
+import BaseLLM from '../../base/llm';
+import OpenAILLM from './openaiLLM';

 interface OpenAIConfig {
   apiKey: string;
@@ -145,7 +148,7 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
     };
   }

-  async loadChatModel(key: string): Promise<BaseChatModel> {
+  async loadChatModel(key: string): Promise<BaseLLM<any>> {
     const modelList = await this.getModelList();

     const exists = modelList.chat.find((m) => m.key === key);
@@ -156,17 +159,14 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
       );
     }

-    return new ChatOpenAI({
+    return new OpenAILLM({
       apiKey: this.config.apiKey,
-      temperature: 0.7,
       model: key,
-      configuration: {
       baseURL: this.config.baseURL,
-      },
     });
   }

-  async loadEmbeddingModel(key: string): Promise<Embeddings> {
+  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
     const modelList = await this.getModelList();
     const exists = modelList.embedding.find((m) => m.key === key);

@@ -176,12 +176,10 @@ class OpenAIProvider extends BaseModelProvider<OpenAIConfig> {
      );
    }

-    return new OpenAIEmbeddings({
+    return new OpenAIEmbedding({
       apiKey: this.config.apiKey,
       model: key,
-      configuration: {
       baseURL: this.config.baseURL,
-      },
     });
   }

```
src/lib/models/providers/openai/openaiEmbedding.ts (new file, 41 lines):

```ts
import OpenAI from 'openai';
import BaseEmbedding from '../../base/embedding';

type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
};

class OpenAIEmbedding extends BaseEmbedding<OpenAIConfig> {
  openAIClient: OpenAI;

  constructor(protected config: OpenAIConfig) {
    super(config);

    this.openAIClient = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL,
    });
  }

  async embedText(texts: string[]): Promise<number[][]> {
    const response = await this.openAIClient.embeddings.create({
      model: this.config.model,
      input: texts,
    });

    return response.data.map((embedding) => embedding.embedding);
  }

  async embedChunks(chunks: Chunk[]): Promise<number[][]> {
    const response = await this.openAIClient.embeddings.create({
      model: this.config.model,
      input: chunks.map((c) => c.content),
    });

    return response.data.map((embedding) => embedding.embedding);
  }
}

export default OpenAIEmbedding;
```
163  src/lib/models/providers/openai/openaiLLM.ts  Normal file
@@ -0,0 +1,163 @@
import OpenAI from 'openai';
import BaseLLM from '../../base/llm';
import { zodTextFormat, zodResponseFormat } from 'openai/helpers/zod';
import {
  GenerateObjectInput,
  GenerateOptions,
  GenerateTextInput,
  GenerateTextOutput,
  StreamTextOutput,
} from '../../types';
import { parse } from 'partial-json';

type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
  options?: GenerateOptions;
};

class OpenAILLM extends BaseLLM<OpenAIConfig> {
  openAIClient: OpenAI;

  constructor(protected config: OpenAIConfig) {
    super(config);

    this.openAIClient = new OpenAI({
      apiKey: this.config.apiKey,
      baseURL: this.config.baseURL || 'https://api.openai.com/v1',
    });
  }

  withOptions(options: GenerateOptions) {
    this.config.options = {
      ...this.config.options,
      ...options,
    };

    return this;
  }

  async generateText(input: GenerateTextInput): Promise<GenerateTextOutput> {
    this.withOptions(input.options || {});

    const response = await this.openAIClient.chat.completions.create({
      model: this.config.model,
      messages: input.messages,
      temperature: this.config.options?.temperature || 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
    });

    if (response.choices && response.choices.length > 0) {
      return {
        content: response.choices[0].message.content!,
        additionalInfo: {
          finishReason: response.choices[0].finish_reason,
        },
      };
    }

    throw new Error('No response from OpenAI');
  }

  async *streamText(
    input: GenerateTextInput,
  ): AsyncGenerator<StreamTextOutput> {
    this.withOptions(input.options || {});

    const stream = await this.openAIClient.chat.completions.create({
      model: this.config.model,
      messages: input.messages,
      temperature: this.config.options?.temperature || 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      stream: true,
    });

    for await (const chunk of stream) {
      if (chunk.choices && chunk.choices.length > 0) {
        yield {
          contentChunk: chunk.choices[0].delta.content || '',
          done: chunk.choices[0].finish_reason !== null,
          additionalInfo: {
            finishReason: chunk.choices[0].finish_reason,
          },
        };
      }
    }
  }

  async generateObject<T>(input: GenerateObjectInput): Promise<T> {
    this.withOptions(input.options || {});

    const response = await this.openAIClient.chat.completions.parse({
      messages: input.messages,
      model: this.config.model,
      temperature: this.config.options?.temperature || 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      response_format: zodResponseFormat(input.schema, 'object'),
    });

    if (response.choices && response.choices.length > 0) {
      try {
        return input.schema.parse(response.choices[0].message.parsed) as T;
      } catch (err) {
        throw new Error(`Error parsing response from OpenAI: ${err}`);
      }
    }

    throw new Error('No response from OpenAI');
  }

  async *streamObject<T>(input: GenerateObjectInput): AsyncGenerator<T> {
    let recievedObj: string = '';

    this.withOptions(input.options || {});

    const stream = this.openAIClient.responses.stream({
      model: this.config.model,
      input: input.messages,
      temperature: this.config.options?.temperature || 1.0,
      top_p: this.config.options?.topP,
      max_completion_tokens: this.config.options?.maxTokens,
      stop: this.config.options?.stopSequences,
      frequency_penalty: this.config.options?.frequencyPenalty,
      presence_penalty: this.config.options?.presencePenalty,
      text: {
        format: zodTextFormat(input.schema, 'object'),
      },
    });

    for await (const chunk of stream) {
      if (chunk.type === 'response.output_text.delta' && chunk.delta) {
        recievedObj += chunk.delta;

        try {
          yield parse(recievedObj) as T;
        } catch (err) {
          console.log('Error parsing partial object from OpenAI:', err);
          yield {} as T;
        }
      } else if (chunk.type === 'response.output_text.done' && chunk.text) {
        try {
          yield parse(chunk.text) as T;
        } catch (err) {
          throw new Error(`Error parsing response from OpenAI: ${err}`);
        }
      }
    }
  }
}

export default OpenAILLM;
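A hedged usage sketch for OpenAILLM, with an illustrative model key and messages; the Message shape is defined in src/lib/types.ts below:

async function llmDemo() {
  const llm = new OpenAILLM({
    apiKey: process.env.OPENAI_API_KEY!, // assumed env var
    model: 'gpt-4o-mini',                // illustrative model key
    options: { temperature: 0.7 },
  });

  // One-shot generation.
  const out = await llm.generateText({
    messages: [{ role: 'user', content: 'Summarize LangGraph in one line.' }],
  });
  console.log(out.content, out.additionalInfo?.finishReason);

  // Token-by-token streaming; `done` flips once a finish_reason arrives.
  for await (const chunk of llm.streamText({
    messages: [{ role: 'user', content: 'Count to five.' }],
  })) {
    process.stdout.write(chunk.contentChunk);
    if (chunk.done) break;
  }
}

One observation: `streamObject` passes chat-completions parameter names (`max_completion_tokens`, `stop`, the penalty fields) to `responses.stream`, while the Responses API documents `max_output_tokens` for output capping, so that call may warrant a follow-up.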
src/lib/models/types.ts
@@ -1,3 +1,5 @@
+import z from 'zod';
+
 type Model = {
   name: string;
   key: string;
@@ -25,10 +27,59 @@ type ModelWithProvider = {
   providerId: string;
 };

+type GenerateOptions = {
+  temperature?: number;
+  maxTokens?: number;
+  topP?: number;
+  stopSequences?: string[];
+  frequencyPenalty?: number;
+  presencePenalty?: number;
+};
+
+type GenerateTextInput = {
+  messages: Message[];
+  options?: GenerateOptions;
+};
+
+type GenerateTextOutput = {
+  content: string;
+  additionalInfo?: Record<string, any>;
+};
+
+type StreamTextOutput = {
+  contentChunk: string;
+  additionalInfo?: Record<string, any>;
+  done?: boolean;
+};
+
+type GenerateObjectInput = {
+  schema: z.ZodTypeAny;
+  messages: Message[];
+  options?: GenerateOptions;
+};
+
+type GenerateObjectOutput<T> = {
+  object: T;
+  additionalInfo?: Record<string, any>;
+};
+
+type StreamObjectOutput<T> = {
+  objectChunk: Partial<T>;
+  additionalInfo?: Record<string, any>;
+  done?: boolean;
+};
+
 export type {
   Model,
   ModelList,
   ProviderMetadata,
   MinimalProvider,
   ModelWithProvider,
+  GenerateOptions,
+  GenerateTextInput,
+  GenerateTextOutput,
+  StreamTextOutput,
+  GenerateObjectInput,
+  GenerateObjectOutput,
+  StreamObjectOutput,
 };
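These additions define the provider-agnostic generation interface used by OpenAILLM above; GenerateObjectInput accepts any zod schema. A minimal sketch with an illustrative schema (import path assumed from the route code):

import z from 'zod';
import type { GenerateObjectInput } from '@/lib/models/types';

// Illustrative schema; any z.ZodTypeAny satisfies GenerateObjectInput['schema'].
const searchPlanSchema = z.object({
  query: z.string(),
  sources: z.array(z.string()),
});

const input: GenerateObjectInput = {
  schema: searchPlanSchema,
  messages: [{ role: 'user', content: 'Plan a web search for "AC working".' }],
  options: { temperature: 0 },
};

// With an OpenAILLM instance `llm` (see openaiLLM.ts above):
// const plan = await llm.generateObject<z.infer<typeof searchPlanSchema>>(input);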
26  src/lib/prompts/media/image.ts  Normal file
@@ -0,0 +1,26 @@
import { BaseMessageLike } from "@langchain/core/messages";

export const imageSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;

export const imageSearchFewShots: BaseMessageLike[] = [
  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
  ],
  ['assistant', '<query>A cat</query>'],

  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
  ],
  ['assistant', '<query>Car working</query>'],
  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
  ],
  ['assistant', '<query>AC working</query>']
]
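The prompt instructs the model to wrap its answer in a <query> element, so the consuming agent only needs a small extraction step. A hedged sketch of that parsing; the actual implementation in src/lib/agents/media/image.ts is not shown in this diff:

// Hedged sketch: pull the rephrased query out of the model's reply.
function extractQuery(reply: string): string | null {
  const match = reply.match(/<query>([\s\S]*?)<\/query>/);
  return match ? match[1].trim() : null;
}

extractQuery('<query>A cat</query>'); // => 'A cat'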
25  src/lib/prompts/media/videos.ts  Normal file
@@ -0,0 +1,25 @@
import { BaseMessageLike } from "@langchain/core/messages";

export const videoSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
`;

export const videoSearchFewShots: BaseMessageLike[] = [
  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
  ],
  ['assistant', '<query>How does a car work?</query>'],
  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
  ],
  ['assistant', '<query>Theory of relativity</query>'],
  [
    'user',
    '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
  ],
  ['assistant', '<query>AC working</query>'],
]
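The video prompt follows the same <query> protocol, so the extraction sketch shown after image.ts applies unchanged, e.g. extractQuery('<query>Theory of relativity</query>') yields 'Theory of relativity'.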
15  src/lib/prompts/suggestions/index.ts  Normal file
@@ -0,0 +1,15 @@
export const suggestionGeneratorPrompt = `
You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
Make sure the suggestions are medium in length and are informative and relevant to the conversation.

Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:

<suggestions>
Tell me more about SpaceX and their recent projects
What is the latest news on SpaceX?
Who is the CEO of SpaceX?
</suggestions>

Today's date is ${new Date().toISOString()}
`;
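Suggestions come back newline-separated inside <suggestions> tags, so parsing is symmetric to the <query> case. A hedged sketch; the actual consumer is not part of this diff:

// Hedged sketch: split the <suggestions> block into individual lines.
function extractSuggestions(reply: string): string[] {
  const match = reply.match(/<suggestions>([\s\S]*?)<\/suggestions>/);
  if (!match) return [];
  return match[1]
    .split('\n')
    .map((s) => s.trim())
    .filter(Boolean);
}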
9  src/lib/types.ts  Normal file
@@ -0,0 +1,9 @@
type Message = {
  role: 'user' | 'assistant' | 'system';
  content: string;
};

type Chunk = {
  content: string;
  metadata: Record<string, any>;
};
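This file has no import or export statements, so TypeScript treats it as a global script: Message and Chunk become ambient types visible everywhere, which is presumably why openaiEmbedding.ts above can reference Chunk without importing it. An illustrative value:

// Ambient types from src/lib/types.ts need no import at the use site.
const chunk: Chunk = {
  content: 'Perplexica is an AI-powered search engine.',
  metadata: { source: 'README' }, // free-form metadata record
};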
141  yarn.lock
@@ -746,19 +746,19 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"

-"@langchain/anthropic@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.0.tgz#48535c5682851bf8fddcf37aa7ca78d4d93da932"
-  integrity sha512-Lud/FrkFmXMYW5R9y0FC+RGdgjBBVQ2JAnG3A8E1I4+sqv5JgJttw3HKRpFkyBUSyacs6LMfSn5dbJ6TT9nMiQ==
+"@langchain/anthropic@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-1.0.1.tgz#a9f836b11ecbce282fc2afb8d707c52fd37711c4"
+  integrity sha512-yVKePAT+nNHtybyyPlWqiq6lqcoDlIuMgL9B4WMEU5gbmzL170iodiqcgcZNFQLOC1V2wCOzywq6Zr0kB24AFg==
   dependencies:
     "@anthropic-ai/sdk" "^0.65.0"

-"@langchain/classic@1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.0.tgz#acbc15eebba03499cf93e73d2c93703a3da0a46e"
-  integrity sha512-darZFvO5g5e3TqZ4rvZ938F94D4a34v2ZdWfyipmyu7WB4RXMshmYtWCp98o4ec3bfRD9S4+oHMmaPcnk5cs5A==
+"@langchain/classic@1.0.3":
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/@langchain/classic/-/classic-1.0.3.tgz#92482cb4cb8692407b4ecde0df312f035934472f"
+  integrity sha512-XyoaiJSi4y7SzrZMCb3DdDfC+M3gqIQpVH2cOCh9xQf4244jNrncpLXF/MwOJYWxzTsjfcCAHIbFJ0kSH5nqmg==
   dependencies:
-    "@langchain/openai" "1.0.0-alpha.3"
+    "@langchain/openai" "1.1.1"
     "@langchain/textsplitters" "1.0.0"
     handlebars "^4.7.8"
     js-yaml "^4.1.0"
@@ -771,24 +771,24 @@
   optionalDependencies:
     langsmith "^0.3.64"

-"@langchain/community@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.0.tgz#8e587605b7c981882e20281aa9e644a166620145"
-  integrity sha512-CM4vUZHaFHq8HpWBMIWPO5bo/rmRPJ1/iaJk7s8CghkkQ0WLaZzDtoG/wJKJZMDJOUVCtZKTw+TytlGu00/9dg==
+"@langchain/community@^1.0.3":
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-1.0.3.tgz#278c82eee22ff37b120e182b07b7c23ffc6786ab"
+  integrity sha512-86L7qooSY8Fh5Sf2Tu/X8PvDJqvEXohyZUGusuv0XtnWGivwtecBm0vEbVPkLF07I2ZMtyAGzHJOblbveq6Nmg==
   dependencies:
-    "@langchain/classic" "1.0.0"
-    "@langchain/openai" "1.0.0"
+    "@langchain/classic" "1.0.3"
+    "@langchain/openai" "1.1.1"
     binary-extensions "^2.2.0"
-    expr-eval "^2.0.2"
     flat "^5.0.2"
     js-yaml "^4.1.0"
+    math-expression-evaluator "^2.0.0"
     uuid "^10.0.0"
     zod "^3.25.76 || ^4"

-"@langchain/core@^1.0.1":
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.1.tgz#c2bdbdff87649fe17b2c86bf535d749ac73a586c"
-  integrity sha512-hVM3EkojYOk4ISJQKjLuWYSH6kyyOFlZIrLFETDA1L0Z2/Iu0q32aJawZ0FDn6rlXE8QZjBt/9OaOL36rXc05w==
+"@langchain/core@^1.0.5":
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-1.0.5.tgz#1e20ecce80fa4d0b979ea05b24b879b8357d8092"
+  integrity sha512-9Hy/b9+j+mm0Bhnm8xD9B0KpBYTidroLrDHdbrHoMC2DqXoY2umvi1M3M/9D744qsMSaIMP0ZwFcy5YbqI/dGw==
   dependencies:
     "@cfworker/json-schema" "^4.0.2"
     ansi-styles "^5.0.0"
@@ -802,18 +802,18 @@
     uuid "^10.0.0"
     zod "^3.25.76 || ^4"

-"@langchain/google-genai@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.0.tgz#2785fa163788cb6214dffc1dc29fcd5bbb751493"
-  integrity sha512-ICUBZl/46nG6+Yhe5v7kp/2TQBGOzqEkpfKPLDeNyJ4x9OOL46xsW3ZZrHJjhGMQuR6/JMmQMTU9kLoYgsd1Tg==
+"@langchain/google-genai@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-1.0.1.tgz#3601645f652f24e3beb55acc61878070b49c24ed"
+  integrity sha512-a9Bzaswp1P+eA2V8hAWSBypqjxmH+/zhOY1TBdalQuPQBTRH35jBMVgX3CTTAheAzBUGQtlDD4/dR9tyemDbhw==
   dependencies:
     "@google/generative-ai" "^0.24.0"
     uuid "^11.1.0"

-"@langchain/groq@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.0.tgz#413b02158761ff406238467325cd4f9fe0990f3a"
-  integrity sha512-6fG9MEQHNXnxgObFHSPh+BPYyTGcoDnKd+GhI9l96cpHh+QNI+IvypicRCZVSsLdqzRCFHISvBQaH+SP5vgjIw==
+"@langchain/groq@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/groq/-/groq-1.0.1.tgz#7ec8822cd2f29eef4ae0f9c20f67268d1924ab96"
+  integrity sha512-vDQzv6A3mjG0/W/7vL4Iq+dnmhSbMHln+b7Rna810trjZzfNPZhAP6omqZyzCKIqjsQYUH4ODLnSUCNiarfYsQ==
   dependencies:
     groq-sdk "^0.19.0"

@@ -842,30 +842,30 @@
     "@langchain/langgraph-sdk" "~1.0.0"
     uuid "^10.0.0"

-"@langchain/ollama@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.0.tgz#803c353e9dfb1a9e7b20f1460a6a201fec29bb77"
-  integrity sha512-zqn6i7haMjvZW4FQWo0GrF4wYL5mLurdL0qoe+moYWYSCGaay4K7e/4dqM5C/MR16/HPFDzFbBRMkni2PDRBgA==
+"@langchain/langgraph@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-1.0.1.tgz#d0be714653e8a27665f86ea795c5c34189455406"
+  integrity sha512-7y8OTDLrHrpJ55Y5x7c7zU2BbqNllXwxM106Xrd+NaQB5CpEb4hbUfIwe4XmhhscKPwvhXAq3tjeUxw9MCiurQ==
+  dependencies:
+    "@langchain/langgraph-checkpoint" "^1.0.0"
+    "@langchain/langgraph-sdk" "~1.0.0"
+    uuid "^10.0.0"
+
+"@langchain/ollama@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@langchain/ollama/-/ollama-1.0.1.tgz#c63ac6db65110beef4020a5e2b167ad0bc678d33"
+  integrity sha512-Pe32hhTpMvnRlNFJxkdu6r1QzsONGz5uvoLiMU1TpgAUu7EyKr2osymlgjBLqDe2vMKUmqHb+yWRH0IppDBUOg==
   dependencies:
     ollama "^0.5.12"
     uuid "^10.0.0"

-"@langchain/openai@1.0.0", "@langchain/openai@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0.tgz#03b15312286b30ce0149f6052620c6c95b4387bc"
-  integrity sha512-olKEUIjb3HBOiD/NR056iGJz4wiN6HhQ/u65YmGWYadWWoKOcGwheBw/FE0x6SH4zDlI3QmP+vMhuQoaww19BQ==
-  dependencies:
-    js-tiktoken "^1.0.12"
-    openai "^6.3.0"
-    zod "^3.25.76 || ^4"
-
-"@langchain/openai@1.0.0-alpha.3":
-  version "1.0.0-alpha.3"
-  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.0.0-alpha.3.tgz#35c4e770e3421b75a226087af54fbeff147e201a"
-  integrity sha512-re2NXLYeLatPzoB6YRoFgB1fW6i5ygcLGa7PlNOhi3f93uU1vSlWMgjkO9dcN9ALmr/bhoruqJEn7U0Eva+6/w==
+"@langchain/openai@1.1.1", "@langchain/openai@^1.1.1":
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-1.1.1.tgz#67ddcf54ee7ac402f6b75b4b9e25447e78c56a93"
+  integrity sha512-0kUaXejo/sn6QAohWHDaAUapC4CJRkJIajGaWfJC+llSqpDBnmBE1oHg1M2fi1OCeP+ns9SxB6BTsq4Qbiqmig==
   dependencies:
     js-tiktoken "^1.0.12"
-    openai "^6.3.0"
+    openai "^6.9.0"
     zod "^3.25.76 || ^4"

 "@langchain/textsplitters@1.0.0", "@langchain/textsplitters@^1.0.0":
@@ -2607,11 +2607,6 @@ expand-template@^2.0.3:
   resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
   integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==

-expr-eval@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201"
-  integrity sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg==
-
 fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
@@ -3514,17 +3509,16 @@ kuler@^2.0.0:
   resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
   integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==

-langchain@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.1.tgz#fb181176f4aa443ef02e9e5b563bcb4e170dfeb6"
-  integrity sha512-IT4JBVbKBh2AjaUFT9OsmOfeK3UbKy3SgdzZOuvet25sAaMpAR8IaM9XVddRs+OXQqVg6sOS01KUUVCJksVhHg==
+langchain@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/langchain/-/langchain-1.0.4.tgz#c4fa22d927f41d56c356ecfccea5c08ae7b682ef"
+  integrity sha512-g7z2kKvnXOecybbVGHfI2ZmdmP309mxC1FYlq6WC/7RsKgX5MwY9gBjwK16mpKOaozOD9QCo1Ia7o2UcUBRb9Q==
   dependencies:
     "@langchain/langgraph" "^1.0.0"
     "@langchain/langgraph-checkpoint" "^1.0.0"
+    langsmith "~0.3.74"
     uuid "^10.0.0"
     zod "^3.25.76 || ^4"
-  optionalDependencies:
-    langsmith "^0.3.64"

 langsmith@^0.3.64:
   version "0.3.74"
@@ -3539,6 +3533,19 @@ langsmith@^0.3.64:
     semver "^7.6.3"
     uuid "^10.0.0"

+langsmith@~0.3.74:
+  version "0.3.79"
+  resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.3.79.tgz#6c845644da26e7fdd8e9b80706091669fc43bda4"
+  integrity sha512-j5uiAsyy90zxlxaMuGjb7EdcL51Yx61SpKfDOI1nMPBbemGju+lf47he4e59Hp5K63CY8XWgFP42WeZ+zuIU4Q==
+  dependencies:
+    "@types/uuid" "^10.0.0"
+    chalk "^4.1.2"
+    console-table-printer "^2.12.1"
+    p-queue "^6.6.2"
+    p-retry "4"
+    semver "^7.6.3"
+    uuid "^10.0.0"
+
 language-subtag-registry@^0.3.20:
   version "0.3.22"
   resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d"
@@ -3686,6 +3693,11 @@ matcher@^3.0.0:
   dependencies:
     escape-string-regexp "^4.0.0"

+math-expression-evaluator@^2.0.0:
+  version "2.0.7"
+  resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-2.0.7.tgz#dc99a80ce2bf7f9b7df878126feb5c506c1fdf5f"
+  integrity sha512-uwliJZ6BPHRq4eiqNWxZBDzKUiS5RIynFFcgchqhBOloVLVBpZpNG8jRYkedLcBvhph8TnRyWEuxPqiQcwIdog==
+
 math-intrinsics@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9"
@@ -4025,10 +4037,10 @@ onnxruntime-web@1.22.0-dev.20250409-89f8206ba4:
     platform "^1.3.6"
     protobufjs "^7.2.4"

-openai@^6.3.0:
-  version "6.5.0"
-  resolved "https://registry.yarnpkg.com/openai/-/openai-6.5.0.tgz#7dd9c4c0ca6e394c1d1e738b2000e084024685b2"
-  integrity sha512-bNqJ15Ijbs41KuJ2iYz/mGAruFHzQQt7zXo4EvjNLoB64aJdgn1jlMeDTsXjEg+idVYafg57QB/5Rd16oqvZ6A==
+openai@^6.9.0:
+  version "6.9.0"
+  resolved "https://registry.yarnpkg.com/openai/-/openai-6.9.0.tgz#acd15b2233c42b165981f3de8f4cfce27f844fce"
+  integrity sha512-n2sJRYmM+xfJ0l3OfH8eNnIyv3nQY7L08gZQu3dw6wSdfPtKAk92L83M2NIP5SS8Cl/bsBBG3yKzEOjkx0O+7A==

 openapi-types@^12.1.3:
   version "12.1.3"
@@ -5491,12 +5503,7 @@ yocto-queue@^0.1.0:
   resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
   integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==

-zod@^3.22.4:
-  version "3.22.4"
-  resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"
-  integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==
-
-"zod@^3.25.76 || ^4":
+"zod@^3.25.76 || ^4", zod@^4.1.12:
   version "4.1.12"
   resolved "https://registry.yarnpkg.com/zod/-/zod-4.1.12.tgz#64f1ea53d00eab91853195653b5af9eee68970f0"
   integrity sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==