feat(chains): remove unused imports

ItzCrazyKns
2025-07-19 17:57:32 +05:30
parent b48b0eeb0e
commit 7c4aa683a2
2 changed files with 28 additions and 46 deletions
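Both files reduce to the same chain shape: a few-shot ChatPromptTemplate piped into the model, a StringOutputParser, and a lambda that pulls the rephrased query out of <query> tags before the result is handed to searchSearxng. Below is a minimal, self-contained sketch of that pattern, not the repository's exact code: createQueryRephraseChain and the system prompt text are made up for illustration, and a regex stands in for the project-local LineOutputParser helper.

// Sketch only: the general "rephrase follow-up into a search query" pattern
// used by both chains in this commit. LineOutputParser and searchSearxng are
// project-local helpers; a regex stands in for LineOutputParser here.
import { RunnableSequence, RunnableLambda } from '@langchain/core/runnables';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import { StringOutputParser } from '@langchain/core/output_parsers';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

const createQueryRephraseChain = (llm: BaseChatModel) =>
  RunnableSequence.from([
    ChatPromptTemplate.fromMessages([
      // Few-shot examples teach the model the expected <query>...</query> output shape.
      [
        'system',
        'Rephrase the follow-up question into a standalone search query wrapped in <query> tags.',
      ],
      [
        'user',
        '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
      ],
      ['assistant', '<query>A cat</query>'],
      // The real conversation and follow-up are substituted at invoke time.
      [
        'user',
        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
      ],
    ]),
    llm,
    new StringOutputParser(),
    // Extract the rephrased query from the <query> tags (stand-in for LineOutputParser).
    RunnableLambda.from(async (output: string) => {
      const match = output.match(/<query>([\s\S]*?)<\/query>/);
      return match ? match[1].trim() : output.trim();
    }),
  ]);

// Example: await createQueryRephraseChain(llm).invoke({ chat_history: '', query: 'How does an AC work?' });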

View File

@@ -3,7 +3,7 @@ import {
   RunnableMap,
   RunnableLambda,
 } from '@langchain/core/runnables';
-import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
+import { ChatPromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
@@ -43,43 +43,34 @@ const createImageSearchChain = (llm: BaseChatModel) => {
     ChatPromptTemplate.fromMessages([
       ['system', imageSearchChainPrompt],
       [
-        "user",
-        "<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>"
-      ],
-      [
-        "assistant",
-        "<query>A cat</query>"
+        'user',
+        '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
       ],
+      ['assistant', '<query>A cat</query>'],
       [
-        "user",
-        "<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>"
-      ],
-      [
-        "assistant",
-        "<query>Car working</query>"
-      ],
-      [
-        "user",
-        "<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>"
-      ],
-      [
-        "assistant",
-        "<query>AC working</query>"
+        'user',
+        '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
       ],
+      ['assistant', '<query>Car working</query>'],
       [
         'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>'
-      ]
+        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
+      ],
+      ['assistant', '<query>AC working</query>'],
+      [
+        'user',
+        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
+      ],
     ]),
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
       const queryParser = new LineOutputParser({
-        key: 'query'
-      })
-      return (await queryParser.parse(input))
+        key: 'query',
+      });
+      return await queryParser.parse(input);
     }),
     RunnableLambda.from(async (input: string) => {
       const res = await searchSearxng(input, {

View File

@@ -3,7 +3,7 @@ import {
   RunnableMap,
   RunnableLambda,
 } from '@langchain/core/runnables';
-import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
+import { ChatPromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
@@ -45,40 +45,31 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
       ['system', videoSearchChainPrompt],
       [
         'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>'
-      ],
-      [
-        'assistant',
-        '<query>How does a car work?</query>'
+        '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
       ],
+      ['assistant', '<query>How does a car work?</query>'],
       [
         'user',
-        '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>'
-      ],
-      [
-        'assistant',
-        '<query>Theory of relativity</query>'
+        '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
       ],
+      ['assistant', '<query>Theory of relativity</query>'],
       [
         'user',
-        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>'
-      ],
-      [
-        'assistant',
-        '<query>AC working</query>'
+        '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
      ],
+      ['assistant', '<query>AC working</query>'],
       [
         'user',
-        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>'
-      ]
+        '<conversation>{chat_history}</conversation>\n<follow_up>\n{query}\n</follow_up>',
+      ],
     ]),
     llm,
     strParser,
     RunnableLambda.from(async (input: string) => {
       const queryParser = new LineOutputParser({
-        key: 'query'
+        key: 'query',
       });
-      return (await queryParser.parse(input));
+      return await queryParser.parse(input);
     }),
    RunnableLambda.from(async (input: string) => {
      const res = await searchSearxng(input, {