diff --git a/apps/api/src/lib/crawl-redis.ts b/apps/api/src/lib/crawl-redis.ts
index b5936ad6..bd79a86d 100644
--- a/apps/api/src/lib/crawl-redis.ts
+++ b/apps/api/src/lib/crawl-redis.ts
@@ -166,10 +166,10 @@ export async function lockURLs(id: string, sc: StoredCrawl, urls: string[]): Pro
   return res;
 }
 
-export function crawlToCrawler(id: string, sc: StoredCrawl): WebCrawler {
+export function crawlToCrawler(id: string, sc: StoredCrawl, initialUrl?: string): WebCrawler {
   const crawler = new WebCrawler({
     jobId: id,
-    initialUrl: sc.originUrl!,
+    initialUrl: initialUrl ?? sc.originUrl!,
     includes: sc.crawlerOptions?.includes ?? [],
     excludes: sc.crawlerOptions?.excludes ?? [],
     maxCrawledLinks: sc.crawlerOptions?.maxCrawledLinks ?? 1000,
diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts
index 9e3f7cd2..e5a25f37 100644
--- a/apps/api/src/scraper/WebScraper/crawler.ts
+++ b/apps/api/src/scraper/WebScraper/crawler.ts
@@ -171,7 +171,7 @@ export class WebCrawler {
       let fullUrl = href;
       if (!href.startsWith("http")) {
         try {
-          fullUrl = new URL(href, this.baseUrl).toString();
+          fullUrl = new URL(href, url).toString();
         } catch (_) {
           return null;
         }
diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts
index 428e7e01..5a0b28db 100644
--- a/apps/api/src/services/queue-worker.ts
+++ b/apps/api/src/services/queue-worker.ts
@@ -352,10 +352,10 @@ async function processJob(job: Job & { id: string }, token: string) {
 
   if (!job.data.sitemapped && job.data.crawlerOptions !== null) {
     if (!sc.cancelled) {
-      const crawler = crawlToCrawler(job.data.crawl_id, sc);
+      const crawler = crawlToCrawler(job.data.crawl_id, sc, doc.metadata?.url ?? doc.metadata?.sourceURL ?? undefined);
 
       const links = crawler.filterLinks(
-        crawler.extractLinksFromHTML(rawHtml ?? "", sc.originUrl as string),
+        crawler.extractLinksFromHTML(rawHtml ?? "", doc.metadata?.url ?? doc.metadata?.sourceURL ?? sc.originUrl as string),
         Infinity,
         sc.crawlerOptions?.maxDepth ?? 10
       );
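
Note (reviewer sketch, not part of the patch): the crawler.ts change resolves relative hrefs against the URL of the page being parsed rather than the crawl's base URL, and the queue-worker.ts change feeds that page URL (falling back to sourceURL, then originUrl) into the crawler. A minimal TypeScript illustration of why the base matters, using hypothetical URLs:

```ts
// Hypothetical URLs for illustration only.
const originUrl = "https://example.com/";
const pageUrl = "https://example.com/docs/guide/";
const href = "../api/reference";

// Resolving against the crawl origin (old behaviour, this.baseUrl):
console.log(new URL(href, originUrl).toString());
// -> "https://example.com/api/reference"

// Resolving against the page the link actually appeared on (new behaviour):
console.log(new URL(href, pageUrl).toString());
// -> "https://example.com/docs/api/reference"
```

Relative links on redirected or nested pages only resolve correctly in the second case, which appears to be the motivation for threading the per-page URL through crawlToCrawler and extractLinksFromHTML.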