feat(actions): limit urls & queries to 3

Author: ItzCrazyKns
Date:   2025-12-07 22:08:46 +05:30
parent d0124b9f06
commit 1c3a5fe275
2 changed files with 46 additions and 39 deletions

File 1 of 2: scrape_url action (scrapeURLAction)

@@ -6,52 +6,54 @@ import TurnDown from 'turndown';
const turndownService = new TurnDown();

const schema = z.object({
  urls: z.array(z.string()).describe('A list of URLs to scrape content from.'),
});

const scrapeURLAction: ResearchAction<typeof schema> = {
  name: 'scrape_url',
  description:
    'Use after __plan to scrape and extract content from the provided URLs. This is useful when you need detailed information from specific web pages or if the user asks you to summarize or analyze content from certain links. You can scrape maximum of 3 URLs.',
  schema: schema,
  enabled: (_) => true,
  execute: async (params, additionalConfig) => {
    params.urls = params.urls.slice(0, 3);

    const results: Chunk[] = [];

    await Promise.all(
      params.urls.map(async (url) => {
        try {
          const res = await fetch(url);
          const text = await res.text();

          const title =
            text.match(/<title>(.*?)<\/title>/i)?.[1] || `Content from ${url}`;
          const markdown = turndownService.turndown(text);

          results.push({
            content: markdown,
            metadata: {
              url,
              title: title,
            },
          });
        } catch (error) {
          results.push({
            content: `Failed to fetch content from ${url}: ${error}`,
            metadata: {
              url,
              title: `Error fetching ${url}`,
            },
          });
        }
      }),
    );

    return {
      type: 'search_results',
      results,
    };
  },
};

export default scrapeURLAction;
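
For reference, a minimal standalone sketch of what the new cap does at runtime (illustrative TypeScript, not part of the commit): Array.prototype.slice silently drops anything past the third element, so an over-long URL list is truncated rather than rejected.

// Illustrative only: the effect of the slice(0, 3) cap on an over-long list.
const requestedUrls = [
  'https://example.com/a',
  'https://example.com/b',
  'https://example.com/c',
  'https://example.com/d',
];

const scrapedUrls = requestedUrls.slice(0, 3);
console.log(`${scrapedUrls.length} of ${requestedUrls.length} URLs will be scraped`); // 3 of 4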

File 2 of 2: web search action (webSearchAction)

@@ -13,7 +13,10 @@ const actionSchema = z.object({
const actionDescription = `
Use immediately after the ___plan call when you need information. Default to using this unless you already have everything needed to finish. Provide 1-3 short, SEO-friendly queries (keywords, not sentences) that cover the user ask. Always prefer current/contextual queries (e.g., include year for news).
You can search maximum of 3 queries at a time.
For fast mode, you can only use this tool once so make sure to get all needed information in one go.
For balanced and quality modes, you can use this tool multiple times as needed.
In quality and balanced mode, first try to gather upper level information with broad queries, then use more specific queries based on what you find to find all information needed.
@@ -26,6 +29,8 @@ const webSearchAction: ResearchAction<typeof actionSchema> = {
  enabled: (config) =>
    config.classification.classification.skipSearch === false,
  execute: async (input, _) => {
    input.queries = input.queries.slice(0, 3);

    let results: Chunk[] = [];
    const search = async (q: string) => {
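
The query cap works the same way inside execute. A possible alternative, sketched below and not what this commit does, would be to clamp at the schema level with Zod so that any consumer of the parsed input already sees at most three queries; the field name queries matches the hunk above, while the describe wording is illustrative.

import { z } from 'zod';

// Hypothetical alternative: enforce the limit at parse time instead of in execute().
const cappedQuerySchema = z.object({
  queries: z
    .array(z.string())
    .describe('A list of search queries.') // wording assumed, not taken from the repo
    .transform((qs) => qs.slice(0, 3)),
});

const parsed = cappedQuerySchema.parse({ queries: ['a', 'b', 'c', 'd'] });
console.log(parsed.queries); // ['a', 'b', 'c']

Keeping the clamp inside execute, as the commit does, guards against tool calls that ignore the "maximum of 3" instruction in the action description while leaving the schema itself unchanged.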