fix(v0): crawl timeout errors

Gergő Móricz 2024-11-12 19:46:00 +01:00
parent fbabc779f5
commit f2ecf0cc36
2 changed files with 3 additions and 1 deletion

@@ -75,7 +75,7 @@ export async function crawlStatusController(req: Request, res: Response) {
   const jobStatus = sc.cancelled ? "failed" : jobStatuses.every(x => x === "completed") ? "completed" : "active";
-  const data = jobs.filter(x => x.failedReason !== "Concurreny limit hit").map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue);
+  const data = jobs.filter(x => x.failedReason !== "Concurreny limit hit" && x.returnvalue !== null).map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue);
   if (
     jobs.length > 0 &&
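
Context for this hunk: jobs that never produced a result (for example, jobs that timed out) are stored with a null returnvalue, and the old filter let them through, so null entries leaked into the status response data. A minimal sketch of the fixed filter follows; the Job shape and the sample failedReason values, other than the "Concurreny limit hit" literal (which is spelled that way in the codebase), are assumptions.

// Minimal sketch of the fixed filter; data shapes here are hypothetical.
type Job = { failedReason?: string; returnvalue: unknown };

const jobs: Job[] = [
  { returnvalue: [{ url: "https://example.com" }] },            // completed scrape
  { failedReason: "Concurreny limit hit", returnvalue: null },  // requeued job
  { failedReason: "timed out", returnvalue: null },             // assumed timeout failure
];

// Before the fix, the timed-out job passed the filter and produced a null
// entry in data; the added `x.returnvalue !== null` clause drops it.
const data = jobs
  .filter(x => x.failedReason !== "Concurreny limit hit" && x.returnvalue !== null)
  .map(x => (Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue));

console.log(data); // [ { url: "https://example.com" } ], no null entries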

@@ -138,6 +138,8 @@ export async function crawlController(req: Request, res: Response) {
   const { scrapeOptions, internalOptions } = fromLegacyScrapeOptions(pageOptions, undefined, undefined);
+
+  delete (scrapeOptions as any).timeout;
   const sc: StoredCrawl = {
     originUrl: url,
     crawlerOptions,
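
Context for this hunk: in the v0 crawl path, a request-level timeout carried from pageOptions into the shared scrapeOptions would apply to every page job the crawl spawns, so long crawls failed with timeout errors; the commit deletes the field before the crawl is stored. A sketch under the assumption that fromLegacyScrapeOptions copies the timeout through; the types and the stand-in function below are hypothetical, and only the `delete` line mirrors the commit.

// Stand-in types and helper; only the `delete` line comes from the commit.
interface ScrapeOptions {
  formats: string[];
  timeout?: number;
}

// Hypothetical stand-in for the real fromLegacyScrapeOptions.
function fromLegacyScrapeOptionsSketch(pageOptions: { timeout?: number }): ScrapeOptions {
  return { formats: ["markdown"], timeout: pageOptions.timeout };
}

const scrapeOptions = fromLegacyScrapeOptionsSketch({ timeout: 30_000 });

// A crawl fans out into many page jobs; a single-request timeout applied to
// each of them kills pages that are still legitimately in flight, so the
// timeout is stripped before the crawl is persisted.
delete (scrapeOptions as any).timeout;

console.log(scrapeOptions); // { formats: ["markdown"] }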