mirror of https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-09-18 15:21:33 +00:00

Compare commits: 567c6a8758...feat/struc (5 commits)

Commits:
- df33229934
- 49fafaa096
- ca9b32a23b
- 76e3ff4e02
- eabf3ca7d3
@@ -81,7 +81,8 @@ export const POST = async (req: Request) => {
     if (body.chatModel?.provider === 'custom_openai') {
       llm = new ChatOpenAI({
         modelName: body.chatModel?.name || getCustomOpenaiModelName(),
-        apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
+        apiKey:
+          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
         temperature: 0.7,
         configuration: {
           baseURL:
@@ -1,10 +1,7 @@
 export const POST = async (req: Request) => {
   try {
-    const body: {
-      lat: number;
-      lng: number;
-      measureUnit: 'Imperial' | 'Metric';
-    } = await req.json();
+    const body: { lat: number; lng: number; temperatureUnit: 'C' | 'F' } =
+      await req.json();
 
     if (!body.lat || !body.lng) {
       return Response.json(
@@ -16,9 +13,7 @@ export const POST = async (req: Request) => {
     }
 
     const res = await fetch(
-      `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${
-        body.measureUnit === 'Metric' ? '' : '&temperature_unit=fahrenheit'
-      }${body.measureUnit === 'Metric' ? '' : '&wind_speed_unit=mph'}`,
+      `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}&current=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${body.temperatureUnit === 'C' ? '' : '&temperature_unit=fahrenheit'}`,
     );
 
     const data = await res.json();
@@ -40,15 +35,13 @@ export const POST = async (req: Request) => {
       windSpeed: number;
       icon: string;
       temperatureUnit: 'C' | 'F';
-      windSpeedUnit: 'm/s' | 'mph';
     } = {
       temperature: data.current.temperature_2m,
       condition: '',
       humidity: data.current.relative_humidity_2m,
       windSpeed: data.current.wind_speed_10m,
       icon: '',
-      temperatureUnit: body.measureUnit === 'Metric' ? 'C' : 'F',
-      windSpeedUnit: body.measureUnit === 'Metric' ? 'm/s' : 'mph',
+      temperatureUnit: body.temperatureUnit,
     };
 
     const code = data.current.weather_code;
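For reference, a minimal sketch of how a client would call this route after the change; the route path and body shape come from the diff above, the coordinate values are illustrative:

// Sketch: calling the weather route with the new body shape (hypothetical values).
async function fetchWeather() {
  const res = await fetch('/api/weather', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // temperatureUnit replaces the old measureUnit field
    body: JSON.stringify({ lat: 52.52, lng: 13.405, temperatureUnit: 'F' }),
  });
  // Response fields per the route above: temperature, condition, humidity,
  // windSpeed, icon, temperatureUnit.
  return res.json();
}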
@@ -148,9 +148,7 @@ const Page = () => {
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
-  const [measureUnit, setMeasureUnit] = useState<'Imperial' | 'Metric'>(
-    'Metric',
-  );
+  const [temperatureUnit, setTemperatureUnit] = useState<'C' | 'F'>('C');
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
 
   useEffect(() => {
@@ -213,9 +211,7 @@ const Page = () => {
 
       setSystemInstructions(localStorage.getItem('systemInstructions')!);
 
-      setMeasureUnit(
-        localStorage.getItem('measureUnit')! as 'Imperial' | 'Metric',
-      );
+      setTemperatureUnit(localStorage.getItem('temperatureUnit')! as 'C' | 'F');
 
       setIsLoading(false);
     };
@@ -375,8 +371,8 @@ const Page = () => {
         localStorage.setItem('embeddingModel', value);
       } else if (key === 'systemInstructions') {
         localStorage.setItem('systemInstructions', value);
-      } else if (key === 'measureUnit') {
-        localStorage.setItem('measureUnit', value.toString());
+      } else if (key === 'temperatureUnit') {
+        localStorage.setItem('temperatureUnit', value.toString());
       }
     } catch (err) {
       console.error('Failed to save:', err);
@@ -434,22 +430,22 @@ const Page = () => {
             </div>
             <div className="flex flex-col space-y-1">
               <p className="text-black/70 dark:text-white/70 text-sm">
-                Measurement Units
+                Temperature Unit
               </p>
               <Select
-                value={measureUnit ?? undefined}
+                value={temperatureUnit ?? undefined}
                 onChange={(e) => {
-                  setMeasureUnit(e.target.value as 'Imperial' | 'Metric');
-                  saveConfig('measureUnit', e.target.value);
+                  setTemperatureUnit(e.target.value as 'C' | 'F');
+                  saveConfig('temperatureUnit', e.target.value);
                 }}
                 options={[
                   {
-                    label: 'Metric',
-                    value: 'Metric',
+                    label: 'Celsius',
+                    value: 'C',
                   },
                   {
-                    label: 'Imperial',
-                    value: 'Imperial',
+                    label: 'Fahrenheit',
+                    value: 'F',
                   },
                 ]}
               />
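The setting round-trips through localStorage; a minimal sketch of the contract the settings page and the weather widget both assume after this change:

// Sketch: the localStorage contract implied by the diff above.
localStorage.setItem('temperatureUnit', 'F');
const unit = (localStorage.getItem('temperatureUnit') ?? 'C') as 'C' | 'F';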
@@ -354,11 +354,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
     }
   }, [isMessagesLoaded, isConfigReady]);
 
-  const sendMessage = async (
-    message: string,
-    messageId?: string,
-    rewrite = false,
-  ) => {
+  const sendMessage = async (message: string, messageId?: string) => {
     if (loading) return;
     if (!isConfigReady) {
       toast.error('Cannot send message before the configuration is ready');
@@ -486,8 +482,6 @@ const ChatWindow = ({ id }: { id?: string }) => {
       }
     };
 
-    const messageIndex = messages.findIndex((m) => m.messageId === messageId);
-
     const res = await fetch('/api/chat', {
       method: 'POST',
       headers: {
@@ -504,9 +498,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
         files: fileIds,
         focusMode: focusMode,
         optimizationMode: optimizationMode,
-        history: rewrite
-          ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
-          : chatHistory,
+        history: chatHistory,
         chatModel: {
           name: chatModelProvider.name,
           provider: chatModelProvider.provider,
@@ -560,7 +552,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
       return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
     });
 
-    sendMessage(message.content, message.messageId, true);
+    sendMessage(message.content, message.messageId);
   };
 
   useEffect(() => {
@@ -21,16 +21,8 @@ import SearchVideos from './SearchVideos';
 import { useSpeech } from 'react-text-to-speech';
 import ThinkBox from './ThinkBox';
 
-const ThinkTagProcessor = ({
-  children,
-  thinkingEnded,
-}: {
-  children: React.ReactNode;
-  thinkingEnded: boolean;
-}) => {
-  return (
-    <ThinkBox content={children as string} thinkingEnded={thinkingEnded} />
-  );
+const ThinkTagProcessor = ({ children }: { children: React.ReactNode }) => {
+  return <ThinkBox content={children as string} />;
 };
 
 const MessageBox = ({
@@ -54,7 +46,6 @@ const MessageBox = ({
 }) => {
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
-  const [thinkingEnded, setThinkingEnded] = useState(false);
 
   useEffect(() => {
     const citationRegex = /\[([^\]]+)\]/g;
@@ -70,10 +61,6 @@ const MessageBox = ({
       }
     }
 
-    if (message.role === 'assistant' && message.content.includes('</think>')) {
-      setThinkingEnded(true);
-    }
-
     if (
       message.role === 'assistant' &&
       message?.sources &&
@@ -101,7 +88,7 @@ const MessageBox = ({
           if (url) {
             return `<a href="${url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${numStr}</a>`;
           } else {
-            return ``;
+            return `[${numStr}]`;
           }
         })
         .join('');
@@ -112,14 +99,6 @@ const MessageBox = ({
       );
       setSpeechMessage(message.content.replace(regex, ''));
       return;
-    } else if (
-      message.role === 'assistant' &&
-      message?.sources &&
-      message.sources.length === 0
-    ) {
-      setParsedMessage(processedMessage.replace(regex, ''));
-      setSpeechMessage(message.content.replace(regex, ''));
-      return;
     }
 
     setSpeechMessage(message.content.replace(regex, ''));
@@ -132,9 +111,6 @@ const MessageBox = ({
     overrides: {
       think: {
         component: ThinkTagProcessor,
-        props: {
-          thinkingEnded: thinkingEnded,
-        },
       },
     },
   };
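The `think` override above relies on markdown-to-jsx's custom-tag mapping. A minimal standalone sketch of that mechanism; the `Think` and `Demo` components here are illustrative, not from the repo:

// Sketch: markdown-to-jsx maps an unrecognized <think> tag to a React component.
import React from 'react';
import Markdown from 'markdown-to-jsx';

const Think = ({ children }: { children: React.ReactNode }) => (
  <details>{children}</details>
);

const Demo = () => (
  <Markdown options={{ overrides: { think: { component: Think } } }}>
    {'<think>model reasoning...</think>\n\nFinal answer.'}
  </Markdown>
);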
@@ -1,23 +1,15 @@
 'use client';
 
-import { useEffect, useState } from 'react';
+import { useState } from 'react';
+import { cn } from '@/lib/utils';
 import { ChevronDown, ChevronUp, BrainCircuit } from 'lucide-react';
 
 interface ThinkBoxProps {
   content: string;
-  thinkingEnded: boolean;
 }
 
-const ThinkBox = ({ content, thinkingEnded }: ThinkBoxProps) => {
-  const [isExpanded, setIsExpanded] = useState(true);
-
-  useEffect(() => {
-    if (thinkingEnded) {
-      setIsExpanded(false);
-    } else {
-      setIsExpanded(true);
-    }
-  }, [thinkingEnded]);
+const ThinkBox = ({ content }: ThinkBoxProps) => {
+  const [isExpanded, setIsExpanded] = useState(false);
 
   return (
     <div className="my-4 bg-light-secondary/50 dark:bg-dark-secondary/50 rounded-xl border border-light-200 dark:border-dark-200 overflow-hidden">
@@ -10,7 +10,6 @@ const WeatherWidget = () => {
     windSpeed: 0,
     icon: '',
     temperatureUnit: 'C',
-    windSpeedUnit: 'm/s',
   });
 
   const [loading, setLoading] = useState(true);
@@ -76,7 +75,7 @@ const WeatherWidget = () => {
         body: JSON.stringify({
           lat: location.latitude,
           lng: location.longitude,
-          measureUnit: localStorage.getItem('measureUnit') ?? 'Metric',
+          temperatureUnit: localStorage.getItem('temperatureUnit') ?? 'C',
         }),
       });
 
@@ -96,7 +95,6 @@ const WeatherWidget = () => {
         windSpeed: data.windSpeed,
         icon: data.icon,
         temperatureUnit: data.temperatureUnit,
-        windSpeedUnit: data.windSpeedUnit,
       });
       setLoading(false);
     });
@@ -141,7 +139,7 @@ const WeatherWidget = () => {
           </span>
           <span className="flex items-center text-xs text-black/60 dark:text-white/60">
             <Wind className="w-3 h-3 mr-1" />
-            {data.windSpeed} {data.windSpeedUnit}
+            {data.windSpeed} km/h
           </span>
         </div>
         <span className="text-xs text-black/60 dark:text-white/60 mt-1">
@@ -3,18 +3,32 @@ import {
   RunnableMap,
   RunnableLambda,
 } from '@langchain/core/runnables';
-import { ChatPromptTemplate } from '@langchain/core/prompts';
+import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
 import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import LineOutputParser from '../outputParsers/lineOutputParser';
 
 const imageSearchChainPrompt = `
 You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
 You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
-Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
+
+Example:
+1. Follow up question: What is a cat?
+Rephrased: A cat
+
+2. Follow up question: What is a car? How does it works?
+Rephrased: Car working
+
+3. Follow up question: How does an AC work?
+Rephrased: AC working
+
+Conversation:
+{chat_history}
+
+Follow up question: {query}
+Rephrased question:
 `;
 
 type ImageSearchChainInput = {
@@ -40,39 +54,12 @@ const createImageSearchChain = (llm: BaseChatModel) => {
         return input.query;
       },
     }),
-    ChatPromptTemplate.fromMessages([
-      ['system', imageSearchChainPrompt],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
-      ],
-      ['assistant', '<query>A cat</query>'],
-
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
-      ],
-      ['assistant', '<query>Car working</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
-      ],
-      ['assistant', '<query>AC working</query>'],
-      [
-        'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
-      ],
-    ]),
+    PromptTemplate.fromTemplate(imageSearchChainPrompt),
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
-      const queryParser = new LineOutputParser({
-        key: 'query',
-      });
-
-      return await queryParser.parse(input);
-    }),
-    RunnableLambda.from(async (input: string) => {
+      input = input.replace(/<think>.*?<\/think>/g, '');
+
       const res = await searchSearxng(input, {
         engines: ['bing images', 'google images'],
       });
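A minimal sketch of how the flat PromptTemplate above gets filled at invoke time; the template text is abbreviated and the values are illustrative:

// Sketch: PromptTemplate substitutes the {chat_history} and {query} placeholders.
import { PromptTemplate } from '@langchain/core/prompts';

const prompt = PromptTemplate.fromTemplate(
  'Conversation:\n{chat_history}\n\nFollow up question: {query}\nRephrased question:',
);

async function demo() {
  return prompt.format({
    chat_history: 'human: I adopted a kitten\nai: congratulations!',
    query: 'What do cats eat?',
  });
}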
@@ -3,19 +3,33 @@ import {
   RunnableMap,
   RunnableLambda,
 } from '@langchain/core/runnables';
-import { ChatPromptTemplate } from '@langchain/core/prompts';
+import { PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
 import { searchSearxng } from '../searxng';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import LineOutputParser from '../outputParsers/lineOutputParser';
 
-const videoSearchChainPrompt = `
+const VideoSearchChainPrompt = `
 You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
 You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
-Output only the rephrased query wrapped in an XML <query> element. Do not include any explanation or additional text.
+
+Example:
+1. Follow up question: How does a car work?
+Rephrased: How does a car work?
+
+2. Follow up question: What is the theory of relativity?
+Rephrased: What is theory of relativity
+
+3. Follow up question: How does an AC work?
+Rephrased: How does an AC work
+
+Conversation:
+{chat_history}
+
+Follow up question: {query}
+Rephrased question:
 `;
 
 type VideoSearchChainInput = {
   chat_history: BaseMessage[];
@@ -41,37 +55,12 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
         return input.query;
       },
     }),
-    ChatPromptTemplate.fromMessages([
-      ['system', videoSearchChainPrompt],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
-      ],
-      ['assistant', '<query>How does a car work?</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
-      ],
-      ['assistant', '<query>Theory of relativity</query>'],
-      [
-        'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
-      ],
-      ['assistant', '<query>AC working</query>'],
-      [
-        'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
-      ],
-    ]),
+    PromptTemplate.fromTemplate(VideoSearchChainPrompt),
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
-      const queryParser = new LineOutputParser({
-        key: 'query',
-      });
-      return await queryParser.parse(input);
-    }),
-    RunnableLambda.from(async (input: string) => {
+      input = input.replace(/<think>.*?<\/think>/g, '');
+
       const res = await searchSearxng(input, {
         engines: ['youtube'],
       });
@@ -103,8 +92,8 @@ const handleVideoSearch = (
   input: VideoSearchChainInput,
   llm: BaseChatModel,
 ) => {
-  const videoSearchChain = createVideoSearchChain(llm);
-  return videoSearchChain.invoke(input);
+  const VideoSearchChain = createVideoSearchChain(llm);
+  return VideoSearchChain.invoke(input);
 };
 
 export default handleVideoSearch;
@@ -1,63 +1,41 @@
 export const webSearchRetrieverPrompt = `
-You are an AI question rephraser. You will be given a conversation and a follow-up question, you will have to rephrase the follow up question so it is a standalone question and can be used by another LLM to search the web for information to answer it.
-If it is a simple writing task or a greeting (unless the greeting contains a question after it) like Hi, Hello, How are you, etc. than a question then you need to return \`not_needed\` as the response (This is because the LLM won't need to search the web for finding information on this topic).
-If the user asks some question from some URL or wants you to summarize a PDF or a webpage (via URL) you need to return the links inside the \`links\` XML block and the question inside the \`question\` XML block. If the user wants to you to summarize the webpage or the PDF you need to return \`summarize\` inside the \`question\` XML block in place of a question and the link to summarize in the \`links\` XML block.
-You must always return the rephrased question inside the \`question\` XML block, if there are no links in the follow-up question then don't insert a \`links\` XML block in your response.
+You are an AI question rephraser. You will be given a conversation and a follow-up question; rephrase it into a standalone question that another LLM can use to search the web.
 
-There are several examples attached for your reference inside the below \`examples\` XML block
+Return ONLY a JSON object that matches this schema:
+query: string // the standalone question (or "summarize")
+links: string[] // URLs extracted from the user query (empty if none)
+searchRequired: boolean // true if web search is needed, false for greetings/simple writing tasks
+searchMode: "" | "normal" | "news" // "" when searchRequired is false; "news" if the user asks for news/articles, otherwise "normal"
 
-<examples>
-1. Follow up question: What is the capital of France
-Rephrased question:\`
-<question>
-Capital of france
-</question>
-\`
+Rules
+- Greetings / simple writing tasks → query:"", links:[], searchRequired:false, searchMode:""
+- Summarizing a URL → query:"summarize", links:[url...], searchRequired:true, searchMode:"normal"
+- Asking for news/articles → searchMode:"news"
 
+Examples
+1. Follow-up: What is the capital of France?
+"query":"capital of France","links":[],"searchRequired":true,"searchMode":"normal"
+
 2. Hi, how are you?
-Rephrased question\`
-<question>
-not_needed
-</question>
-\`
+"query":"","links":[],"searchRequired":false,"searchMode":""
 
-3. Follow up question: What is Docker?
-Rephrased question: \`
-<question>
-What is Docker
-</question>
-\`
+3. Follow-up: What is Docker?
+"query":"what is Docker","links":[],"searchRequired":true,"searchMode":"normal"
 
-4. Follow up question: Can you tell me what is X from https://example.com
-Rephrased question: \`
-<question>
-Can you tell me what is X?
-</question>
+4. Follow-up: Can you tell me what is X from https://example.com?
+"query":"what is X","links":["https://example.com"],"searchRequired":true,"searchMode":"normal"
 
-<links>
-https://example.com
-</links>
-\`
+5. Follow-up: Summarize the content from https://example.com
+"query":"summarize","links":["https://example.com"],"searchRequired":true,"searchMode":"normal"
 
-5. Follow up question: Summarize the content from https://example.com
-Rephrased question: \`
-<question>
-summarize
-</question>
-
-<links>
-https://example.com
-</links>
-\`
-</examples>
+6. Follow-up: Latest news about AI
+"query":"latest news about AI","links":[],"searchRequired":true,"searchMode":"news"
 
-Anything below is the part of the actual conversation and you need to use conversation and the follow-up question to rephrase the follow-up question as a standalone question based on the guidelines shared above.
-
 <conversation>
 {chat_history}
 </conversation>
 
-Follow up question: {query}
+Follow-up question: {query}
 Rephrased question:
 `;
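For clarity, one well-formed model response under the new JSON-based prompt might look like the following; the values are illustrative, the field names and types mirror the schema in the diff above:

// Illustrative only: a valid output under the retriever prompt's JSON schema.
const exampleRetrieverOutput = {
  query: 'latest news about AI',
  links: [] as string[],
  searchRequired: true,
  searchMode: 'news' as '' | 'normal' | 'news',
};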
@@ -14,12 +14,16 @@ import { Embeddings } from '@langchain/core/embeddings';
 
 const geminiChatModels: Record<string, string>[] = [
   {
-    displayName: 'Gemini 2.5 Flash',
-    key: 'gemini-2.5-flash',
+    displayName: 'Gemini 2.5 Flash Preview 05-20',
+    key: 'gemini-2.5-flash-preview-05-20',
   },
   {
-    displayName: 'Gemini 2.5 Pro',
-    key: 'gemini-2.5-pro',
+    displayName: 'Gemini 2.5 Pro Preview',
+    key: 'gemini-2.5-pro-preview-05-06',
+  },
+  {
+    displayName: 'Gemini 2.5 Pro Experimental',
+    key: 'gemini-2.5-pro-preview-05-06',
   },
   {
     displayName: 'Gemini 2.0 Flash',
@@ -71,7 +75,7 @@ export const loadGeminiChatModels = async () => {
         displayName: model.displayName,
         model: new ChatGoogleGenerativeAI({
           apiKey: geminiApiKey,
-          model: model.key,
+          modelName: model.key,
           temperature: 0.7,
         }) as unknown as BaseChatModel,
       };
@@ -104,7 +108,7 @@ export const loadGeminiEmbeddingModels = async () => {
 
     return embeddingModels;
   } catch (err) {
-    console.error(`Error loading Gemini embeddings models: ${err}`);
+    console.error(`Error loading OpenAI embeddings models: ${err}`);
     return {};
   }
 };
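For context, a minimal standalone construction of the Gemini chat model touched above; this is a sketch assuming @langchain/google-genai, where `modelName` is, to our understanding, the older alias of the `model` field:

// Sketch: constructing the Gemini chat model outside the provider loader.
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

const chat = new ChatGoogleGenerativeAI({
  apiKey: process.env.GEMINI_API_KEY,
  model: 'gemini-2.0-flash', // `modelName` works as an older alias
  temperature: 0.7,
});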
@@ -24,6 +24,7 @@ import computeSimilarity from '../utils/computeSimilarity';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import eventEmitter from 'events';
 import { StreamEvent } from '@langchain/core/tracers/log_stream';
+import { z } from 'zod';
 
 export interface MetaSearchAgentType {
   searchAndAnswer: (
@@ -52,6 +53,17 @@ type BasicChainInput = {
   query: string;
 };
 
+const retrieverLLMOutputSchema = z.object({
+  query: z.string().describe('The query to search the web for.'),
+  links: z
+    .array(z.string())
+    .describe('The links to search/summarize if present'),
+  searchRequired: z
+    .boolean()
+    .describe('Wether there is a need to search the web'),
+  searchMode: z.enum(['', 'normal', 'news']).describe('The search mode.'),
+});
+
 class MetaSearchAgent implements MetaSearchAgentType {
   private config: Config;
   private strParser = new StringOutputParser();
@@ -62,26 +74,24 @@ class MetaSearchAgent implements MetaSearchAgentType {
 
   private async createSearchRetrieverChain(llm: BaseChatModel) {
     (llm as unknown as ChatOpenAI).temperature = 0;
 
     return RunnableSequence.from([
       PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt),
-      llm,
-      this.strParser,
-      RunnableLambda.from(async (input: string) => {
-        const linksOutputParser = new LineListOutputParser({
-          key: 'links',
-        });
-
-        const questionOutputParser = new LineOutputParser({
-          key: 'question',
-        });
-
-        const links = await linksOutputParser.parse(input);
-        let question = this.config.summarizer
-          ? await questionOutputParser.parse(input)
-          : input;
-
-        if (question === 'not_needed') {
-          return { query: '', docs: [] };
-        }
+      Object.assign(
+        Object.create(Object.getPrototypeOf(llm)),
+        llm,
+      ).withStructuredOutput(retrieverLLMOutputSchema, {
+        ...(llm.metadata?.['model-type'] === 'groq'
+          ? {
+              method: 'json-object',
+            }
+          : {}),
+      }),
+      RunnableLambda.from(
+        async (input: z.infer<typeof retrieverLLMOutputSchema>) => {
+          let question = input.query;
+          const links = input.links;
+
+          if (!input.searchRequired) {
+            return { query: '', docs: [] };
+          }
 
@@ -207,7 +217,10 @@ class MetaSearchAgent implements MetaSearchAgentType {
 
         const res = await searchSearxng(question, {
           language: 'en',
-          engines: this.config.activeEngines,
+          engines:
+            input.searchMode === 'normal'
+              ? this.config.activeEngines
+              : ['bing news'],
         });
 
         const documents = res.results.map(
@@ -228,7 +241,8 @@ class MetaSearchAgent implements MetaSearchAgentType {
 
         return { query: question, docs: documents };
         }
-      }),
+        },
+      ),
     ]);
   }
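A compact sketch of the structured-output pattern the retriever chain adopts above; it is standalone, the model choice and prompt string are illustrative, and the schema mirrors the diff:

// Sketch: binding a zod schema to a chat model via withStructuredOutput,
// so the reply comes back parsed and typed instead of as free-form text.
import { z } from 'zod';
import { ChatOpenAI } from '@langchain/openai';

const schema = z.object({
  query: z.string(),
  links: z.array(z.string()),
  searchRequired: z.boolean(),
  searchMode: z.enum(['', 'normal', 'news']),
});

async function demo() {
  const llm = new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 });
  const out = await llm
    .withStructuredOutput(schema)
    .invoke('Rephrase for web search: latest news about AI');
  // out is typed as z.infer<typeof schema>
  return out;
}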
@@ -1,11 +1,8 @@
-import { BaseMessage, isAIMessage } from '@langchain/core/messages';
+import { BaseMessage } from '@langchain/core/messages';
 
 const formatChatHistoryAsString = (history: BaseMessage[]) => {
   return history
-    .map(
-      (message) =>
-        `${isAIMessage(message) ? 'AI' : 'User'}: ${message.content}`,
-    )
+    .map((message) => `${message._getType()}: ${message.content}`)
     .join('\n');
 };