From ba2af74adf6f174c5bb7bc8fe32fd128e7c16e6e Mon Sep 17 00:00:00 2001 From: Kevin Swiber Date: Fri, 9 Aug 2024 15:29:18 -0700 Subject: [PATCH 01/31] Ensuring USE_DB_AUTHENTICATION is true in single URL scraper. --- apps/api/src/scraper/WebScraper/single_url.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index 12e075fd..96adcf49 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -23,11 +23,13 @@ import { clientSideError } from "../../strings"; dotenv.config(); +const useDatabaseAuth = process.env.USE_DB_AUTHENTICATION === "true"; + export const baseScrapers = [ "fire-engine", "fire-engine;chrome-cdp", "scrapingBee", - process.env.USE_DB_AUTHENTICATION ? undefined : "playwright", + useDatabaseAuth ? undefined : "playwright", "scrapingBeeLoad", "fetch", ].filter(Boolean); @@ -85,10 +87,10 @@ function getScrapingFallbackOrder( }); let defaultOrder = [ - !process.env.USE_DB_AUTHENTICATION ? undefined : "fire-engine", - !process.env.USE_DB_AUTHENTICATION ? undefined : "fire-engine;chrome-cdp", + !useDatabaseAuth ? undefined : "fire-engine", + !useDatabaseAuth ? undefined : "fire-engine;chrome-cdp", "scrapingBee", - process.env.USE_DB_AUTHENTICATION ? undefined : "playwright", + useDatabaseAuth ? undefined : "playwright", "scrapingBeeLoad", "fetch", ].filter(Boolean); @@ -96,7 +98,7 @@ function getScrapingFallbackOrder( if (isWaitPresent || isScreenshotPresent || isHeadersPresent) { defaultOrder = [ "fire-engine", - process.env.USE_DB_AUTHENTICATION ? undefined : "playwright", + useDatabaseAuth ? undefined : "playwright", ...defaultOrder.filter( (scraper) => scraper !== "fire-engine" && scraper !== "playwright" ), From bbed6ef23d325d4c7fba2774bd760513aa2a75ce Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Mon, 12 Aug 2024 14:20:41 -0300 Subject: [PATCH 02/31] added validation on every USE_DB_AUTHENTICATION call --- apps/api/src/controllers/crawl-status.ts | 3 ++- apps/api/src/controllers/status.ts | 3 ++- apps/api/src/lib/logger.ts | 3 ++- apps/api/src/lib/scrape-events.ts | 3 ++- apps/api/src/lib/withAuth.ts | 3 ++- apps/api/src/main/runWebScraper.ts | 3 ++- apps/api/src/services/logging/crawl_log.ts | 3 ++- apps/api/src/services/logging/log_job.ts | 3 ++- apps/api/src/services/logging/scrape_log.ts | 3 ++- apps/api/src/services/supabase.ts | 3 ++- apps/test-suite/utils/supabase.ts | 6 ++++-- 11 files changed, 24 insertions(+), 12 deletions(-) diff --git a/apps/api/src/controllers/crawl-status.ts b/apps/api/src/controllers/crawl-status.ts index 5aafa433..82a49db8 100644 --- a/apps/api/src/controllers/crawl-status.ts +++ b/apps/api/src/controllers/crawl-status.ts @@ -24,7 +24,8 @@ export async function crawlStatusController(req: Request, res: Response) { const { current, current_url, total, current_step, partialDocs } = await job.progress(); let data = job.returnvalue; - if (process.env.USE_DB_AUTHENTICATION === "true") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (useDbAuthentication) { const supabaseData = await supabaseGetJobById(req.params.jobId); if (supabaseData) { diff --git a/apps/api/src/controllers/status.ts b/apps/api/src/controllers/status.ts index 3d7fccbb..935338bd 100644 --- a/apps/api/src/controllers/status.ts +++ b/apps/api/src/controllers/status.ts @@ -12,7 +12,8 @@ export async function 
crawlJobStatusPreviewController(req: Request, res: Respons const { current, current_url, total, current_step, partialDocs } = await job.progress(); let data = job.returnvalue; - if (process.env.USE_DB_AUTHENTICATION === "true") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (useDbAuthentication) { const supabaseData = await supabaseGetJobById(req.params.jobId); if (supabaseData) { diff --git a/apps/api/src/lib/logger.ts b/apps/api/src/lib/logger.ts index 872dbf51..fb0468c2 100644 --- a/apps/api/src/lib/logger.ts +++ b/apps/api/src/lib/logger.ts @@ -25,7 +25,8 @@ export class Logger { const color = Logger.colors[level]; console[level.toLowerCase()](color, `[${new Date().toISOString()}]${level} - ${message}`); - // if (process.env.USE_DB_AUTH) { + // const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + // if (useDbAuthentication) { // save to supabase? another place? // supabase.from('logs').insert({ level: level, message: message, timestamp: new Date().toISOString(), success: boolean }); // } diff --git a/apps/api/src/lib/scrape-events.ts b/apps/api/src/lib/scrape-events.ts index 8d677279..02ef670b 100644 --- a/apps/api/src/lib/scrape-events.ts +++ b/apps/api/src/lib/scrape-events.ts @@ -36,7 +36,8 @@ export class ScrapeEvents { static async insert(jobId: string, content: ScrapeEvent) { if (jobId === "TEST") return null; - if (process.env.USE_DB_AUTHENTICATION) { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (useDbAuthentication) { try { const result = await supabase.from("scrape_events").insert({ job_id: jobId, diff --git a/apps/api/src/lib/withAuth.ts b/apps/api/src/lib/withAuth.ts index 353c144b..1979907e 100644 --- a/apps/api/src/lib/withAuth.ts +++ b/apps/api/src/lib/withAuth.ts @@ -7,7 +7,8 @@ export function withAuth( originalFunction: (...args: U) => Promise ) { return async function (...args: U): Promise { - if (process.env.USE_DB_AUTHENTICATION === "false") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (!useDbAuthentication) { if (warningCount < 5) { Logger.warn("You're bypassing authentication"); warningCount++; diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts index 76665aa2..3f3293b2 100644 --- a/apps/api/src/main/runWebScraper.ts +++ b/apps/api/src/main/runWebScraper.ts @@ -123,7 +123,8 @@ export async function runWebScraper({ const saveJob = async (job: Job, result: any) => { try { - if (process.env.USE_DB_AUTHENTICATION === "true") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (useDbAuthentication) { const { data, error } = await supabase_service .from("firecrawl_jobs") .update({ docs: result }) diff --git a/apps/api/src/services/logging/crawl_log.ts b/apps/api/src/services/logging/crawl_log.ts index 68008e02..f19b0297 100644 --- a/apps/api/src/services/logging/crawl_log.ts +++ b/apps/api/src/services/logging/crawl_log.ts @@ -3,7 +3,8 @@ import { Logger } from "../../../src/lib/logger"; import "dotenv/config"; export async function logCrawl(job_id: string, team_id: string) { - if (process.env.USE_DB_AUTHENTICATION === 'true') { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (useDbAuthentication) { try { const { data, error } = await supabase_service .from("bulljobs_teams") diff --git a/apps/api/src/services/logging/log_job.ts b/apps/api/src/services/logging/log_job.ts index 93d0b311..2525917c 100644 --- 
a/apps/api/src/services/logging/log_job.ts +++ b/apps/api/src/services/logging/log_job.ts @@ -7,7 +7,8 @@ import { Logger } from "../../lib/logger"; export async function logJob(job: FirecrawlJob) { try { - if (process.env.USE_DB_AUTHENTICATION === "false") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (!useDbAuthentication) { return; } diff --git a/apps/api/src/services/logging/scrape_log.ts b/apps/api/src/services/logging/scrape_log.ts index 099e4a0b..30d8fd1e 100644 --- a/apps/api/src/services/logging/scrape_log.ts +++ b/apps/api/src/services/logging/scrape_log.ts @@ -8,7 +8,8 @@ export async function logScrape( scrapeLog: ScrapeLog, pageOptions?: PageOptions ) { - if (process.env.USE_DB_AUTHENTICATION === "false") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (!useDbAuthentication) { Logger.debug("Skipping logging scrape to Supabase"); return; } diff --git a/apps/api/src/services/supabase.ts b/apps/api/src/services/supabase.ts index 70ada12b..414d1925 100644 --- a/apps/api/src/services/supabase.ts +++ b/apps/api/src/services/supabase.ts @@ -8,8 +8,9 @@ class SupabaseService { constructor() { const supabaseUrl = process.env.SUPABASE_URL; const supabaseServiceToken = process.env.SUPABASE_SERVICE_TOKEN; + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; // Only initialize the Supabase client if both URL and Service Token are provided. - if (process.env.USE_DB_AUTHENTICATION === "false") { + if (!useDbAuthentication) { // Warn the user that Authentication is disabled by setting the client to null Logger.warn( "Authentication is disabled. Supabase client will not be initialized." diff --git a/apps/test-suite/utils/supabase.ts b/apps/test-suite/utils/supabase.ts index abf7fd78..3e66a991 100644 --- a/apps/test-suite/utils/supabase.ts +++ b/apps/test-suite/utils/supabase.ts @@ -9,7 +9,8 @@ class SupabaseService { const supabaseUrl = process.env.SUPABASE_URL; const supabaseServiceToken = process.env.SUPABASE_SERVICE_TOKEN; // Only initialize the Supabase client if both URL and Service Token are provided. - if (process.env.USE_DB_AUTHENTICATION === "false") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (!useDbAuthentication) { // Warn the user that Authentication is disabled by setting the client to null console.warn( "Authentication is disabled. Supabase client will not be initialized." @@ -36,7 +37,8 @@ export const supabase_service: SupabaseClient = new Proxy( new SupabaseService(), { get: function (target, prop, receiver) { - if (process.env.USE_DB_AUTHENTICATION === "false") { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + if (!useDbAuthentication) { console.debug( "Attempted to access Supabase client when it's not configured." 
); From c3aeed510b10793f93c723bacf365bb27f2a57b3 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Mon, 12 Aug 2024 16:40:31 -0300 Subject: [PATCH 03/31] Update single_url.ts --- apps/api/src/scraper/WebScraper/single_url.ts | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index 96adcf49..0c0de938 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -24,13 +24,15 @@ import { clientSideError } from "../../strings"; dotenv.config(); const useDatabaseAuth = process.env.USE_DB_AUTHENTICATION === "true"; +const useScrapingBee = process.env.SCRAPING_BEE_API_KEY !== '' && process.env.SCRAPING_BEE_API_KEY !== undefined; +const useFireEngine = process.env.FIRE_ENGINE_BETA_URL !== '' && process.env.FIRE_ENGINE_BETA_URL !== undefined; export const baseScrapers = [ - "fire-engine", - "fire-engine;chrome-cdp", - "scrapingBee", + useFireEngine ? "fire-engine" : undefined, + useFireEngine ? "fire-engine;chrome-cdp" : undefined, + useScrapingBee ? "scrapingBee" : undefined, useDatabaseAuth ? undefined : "playwright", - "scrapingBeeLoad", + useScrapingBee ? "scrapingBeeLoad" : undefined, "fetch", ].filter(Boolean); @@ -87,11 +89,11 @@ function getScrapingFallbackOrder( }); let defaultOrder = [ - !useDatabaseAuth ? undefined : "fire-engine", - !useDatabaseAuth ? undefined : "fire-engine;chrome-cdp", - "scrapingBee", + useFireEngine ? "fire-engine" : undefined, + useFireEngine ? "fire-engine;chrome-cdp" : undefined, + useScrapingBee ? "scrapingBee" : undefined, + useScrapingBee ? "scrapingBeeLoad" : undefined, useDatabaseAuth ? undefined : "playwright", - "scrapingBeeLoad", "fetch", ].filter(Boolean); From 7c339ea125df178be880841bc34bab6a2cedf5ee Mon Sep 17 00:00:00 2001 From: Rafael Miller <150964962+rafaelsideguide@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:55:10 -0300 Subject: [PATCH 04/31] Update single_url.ts --- apps/api/src/scraper/WebScraper/single_url.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index 0c0de938..d844535b 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -93,14 +93,14 @@ function getScrapingFallbackOrder( useFireEngine ? "fire-engine;chrome-cdp" : undefined, useScrapingBee ? "scrapingBee" : undefined, useScrapingBee ? "scrapingBeeLoad" : undefined, - useDatabaseAuth ? undefined : "playwright", + useFireEngine ? undefined : "playwright", "fetch", ].filter(Boolean); if (isWaitPresent || isScreenshotPresent || isHeadersPresent) { defaultOrder = [ "fire-engine", - useDatabaseAuth ? undefined : "playwright", + useFireEngine ? 
undefined : "playwright", ...defaultOrder.filter( (scraper) => scraper !== "fire-engine" && scraper !== "playwright" ), From 76160a38db8f556261506eeb3ce25f28086d94fc Mon Sep 17 00:00:00 2001 From: Rafael Miller <150964962+rafaelsideguide@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:57:00 -0300 Subject: [PATCH 05/31] Update single_url.ts --- apps/api/src/scraper/WebScraper/single_url.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index d844535b..d5bdcffe 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -23,7 +23,6 @@ import { clientSideError } from "../../strings"; dotenv.config(); -const useDatabaseAuth = process.env.USE_DB_AUTHENTICATION === "true"; const useScrapingBee = process.env.SCRAPING_BEE_API_KEY !== '' && process.env.SCRAPING_BEE_API_KEY !== undefined; const useFireEngine = process.env.FIRE_ENGINE_BETA_URL !== '' && process.env.FIRE_ENGINE_BETA_URL !== undefined; @@ -31,7 +30,7 @@ export const baseScrapers = [ useFireEngine ? "fire-engine" : undefined, useFireEngine ? "fire-engine;chrome-cdp" : undefined, useScrapingBee ? "scrapingBee" : undefined, - useDatabaseAuth ? undefined : "playwright", + useFireEngine ? undefined : "playwright", useScrapingBee ? "scrapingBeeLoad" : undefined, "fetch", ].filter(Boolean); From 00dacaf935a38249698b41e6571c980e440d3a62 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Mon, 2 Sep 2024 18:43:52 -0300 Subject: [PATCH 06/31] Nick: scrape id when origin is website for report system --- apps/api/src/controllers/v1/scrape.ts | 1 + apps/api/src/controllers/v1/types.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts index c573e100..9752674f 100644 --- a/apps/api/src/controllers/v1/scrape.ts +++ b/apps/api/src/controllers/v1/scrape.ts @@ -147,5 +147,6 @@ export async function scrapeController( return res.status(200).json({ success: true, data: legacyDocumentConverter(doc), + scrape_id: origin === "website" ? jobId : undefined, }); } diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts index 85bd625f..f9f50317 100644 --- a/apps/api/src/controllers/v1/types.ts +++ b/apps/api/src/controllers/v1/types.ts @@ -225,6 +225,7 @@ export type ScrapeResponse = success: true; warning?: string; data: Document; + scrape_id?: string; }; export interface ScrapeResponseRequestTest { From 2444f7c450acac23f0b512ad4a33f4e15e448f35 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Mon, 2 Sep 2024 18:51:18 -0300 Subject: [PATCH 07/31] Update scrape.ts --- apps/api/src/controllers/v1/scrape.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts index 9752674f..9fba1a45 100644 --- a/apps/api/src/controllers/v1/scrape.ts +++ b/apps/api/src/controllers/v1/scrape.ts @@ -147,6 +147,6 @@ export async function scrapeController( return res.status(200).json({ success: true, data: legacyDocumentConverter(doc), - scrape_id: origin === "website" ? jobId : undefined, + scrape_id: origin?.includes("website") ? 
jobId : undefined, }); } From 3244454826f71c7c7094b4fa7f5c8727b7ad8c65 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Mon, 2 Sep 2024 19:36:36 -0300 Subject: [PATCH 08/31] Nick: --- apps/api/src/controllers/v1/map.ts | 1 + apps/api/src/controllers/v1/types.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts index 32294a0f..21e91840 100644 --- a/apps/api/src/controllers/v1/map.ts +++ b/apps/api/src/controllers/v1/map.ts @@ -127,5 +127,6 @@ export async function mapController( return res.status(200).json({ success: true, links: linksToReturn, + scrape_id: req.body.origin?.includes("website") ? id : undefined, }); } diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts index f9f50317..c4e0cf84 100644 --- a/apps/api/src/controllers/v1/types.ts +++ b/apps/api/src/controllers/v1/types.ts @@ -247,6 +247,7 @@ export type MapResponse = | { success: true; links: string[]; + scrape_id?: string; }; export type CrawlStatusParams = { From 411d7f31c5347b1524632b6743e3b37508817b9a Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:56:24 -0300 Subject: [PATCH 09/31] fix(sdks): fetch next/pagination --- apps/python-sdk/firecrawl/__init__.py | 2 +- apps/python-sdk/firecrawl/firecrawl.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py index 4b3807be..59c5fe35 100644 --- a/apps/python-sdk/firecrawl/__init__.py +++ b/apps/python-sdk/firecrawl/__init__.py @@ -13,7 +13,7 @@ import os from .firecrawl import FirecrawlApp -__version__ = "1.2.1" +__version__ = "1.2.2" # Define the logger for the Firecrawl project logger: logging.Logger = logging.getLogger("firecrawl") diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index 75245e8d..d65b0341 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -346,6 +346,12 @@ class FirecrawlApp: status_data = status_response.json() if status_data['status'] == 'completed': if 'data' in status_data: + data = status_data['data'] + while 'next' in status_data: + status_response = self._get_request(status_data['next'], headers) + status_data = status_response.json() + data.extend(status_data['data']) + status_data['data'] = data return status_data else: raise Exception('Crawl job completed but no data was returned') From 6ccc22ba2fd77e5fe993c2dd64b8c82900bb352a Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:57:19 -0300 Subject: [PATCH 10/31] fix(sdk): js next pagination --- apps/js-sdk/firecrawl/package.json | 2 +- apps/js-sdk/firecrawl/src/index.ts | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/apps/js-sdk/firecrawl/package.json b/apps/js-sdk/firecrawl/package.json index e68b3014..7114a625 100644 --- a/apps/js-sdk/firecrawl/package.json +++ b/apps/js-sdk/firecrawl/package.json @@ -1,6 +1,6 @@ { "name": "@mendable/firecrawl-js", - "version": "1.2.1", + "version": "1.2.2", "description": "JavaScript SDK for Firecrawl API", "main": "build/cjs/index.js", "types": "types/index.d.ts", diff --git a/apps/js-sdk/firecrawl/src/index.ts b/apps/js-sdk/firecrawl/src/index.ts index 1d1bb4ee..8b16adfb 100644 --- a/apps/js-sdk/firecrawl/src/index.ts +++ b/apps/js-sdk/firecrawl/src/index.ts @@ -454,20 +454,27 @@ export 
default class FirecrawlApp { checkInterval: number ): Promise { while (true) { - const statusResponse: AxiosResponse = await this.getRequest( + let statusResponse: AxiosResponse = await this.getRequest( `${this.apiUrl}/v1/crawl/${id}`, headers ); if (statusResponse.status === 200) { - const statusData = statusResponse.data; + let statusData = statusResponse.data; if (statusData.status === "completed") { if ("data" in statusData) { + let data = statusData.data; + while ('next' in statusData) { + statusResponse = await this.getRequest(statusData.next, headers); + statusData = statusResponse.data; + data = data.concat(statusData.data); + } + statusData.data = data; return statusData; } else { throw new Error("Crawl job completed but no data was returned"); } } else if ( - ["active", "paused", "pending", "queued", "scraping"].includes(statusData.status) + ["active", "paused", "pending", "queued", "waiting", "scraping"].includes(statusData.status) ) { checkInterval = Math.max(checkInterval, 2); await new Promise((resolve) => From 7561fd279f7d48ea0572b14175d61008a615da5a Mon Sep 17 00:00:00 2001 From: Nicolas Date: Tue, 3 Sep 2024 17:08:12 -0300 Subject: [PATCH 11/31] Nick: debug the billing email system for free credits --- apps/api/src/services/billing/credit_billing.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index 22dc72df..d828a54e 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -255,7 +255,9 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { const creditLimit = FREE_CREDITS; const creditUsagePercentage = (totalCreditsUsed + credits) / creditLimit; - if (creditUsagePercentage >= 0.8) { + // Add a check to ensure totalCreditsUsed is greater than 0 + if (totalCreditsUsed > 0 && creditUsagePercentage >= 0.8 && creditUsagePercentage < 1) { + Logger.info(`Sending notification for team ${team_id}. 
Total credits used: ${totalCreditsUsed}, Credit usage percentage: ${creditUsagePercentage}`); await sendNotification( team_id, NotificationType.APPROACHING_LIMIT, From 3072d4a33305c888b6650bd895526319c942f40e Mon Sep 17 00:00:00 2001 From: Nicolas Date: Tue, 3 Sep 2024 21:02:41 -0300 Subject: [PATCH 12/31] Nick: fixed .sort coupons and sentry to withAuth --- apps/api/src/lib/withAuth.ts | 2 ++ apps/api/src/services/billing/credit_billing.ts | 5 +++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/api/src/lib/withAuth.ts b/apps/api/src/lib/withAuth.ts index 1979907e..90cfb449 100644 --- a/apps/api/src/lib/withAuth.ts +++ b/apps/api/src/lib/withAuth.ts @@ -1,5 +1,6 @@ import { AuthResponse } from "../../src/types"; import { Logger } from "./logger"; +import * as Sentry from "@sentry/node"; let warningCount = 0; @@ -18,6 +19,7 @@ export function withAuth( try { return await originalFunction(...args); } catch (error) { + Sentry.captureException(error); Logger.error(`Error in withAuth function: ${error}`); return { success: false, error: error.message } as T; } diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index d828a54e..2cfea85a 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -40,14 +40,15 @@ export async function supaBillTeam(team_id: string, credits: number) { ]); let couponCredits = 0; + let sortedCoupons = []; + if (coupons && coupons.length > 0) { couponCredits = coupons.reduce( (total, coupon) => total + coupon.credits, 0 ); + sortedCoupons = [...coupons].sort((a, b) => b.credits - a.credits); } - - let sortedCoupons = coupons.sort((a, b) => b.credits - a.credits); // using coupon credits: if (couponCredits > 0) { // if there is no subscription and they have enough coupon credits From 049a11187d0a0143721c8ba745889e928ced4b38 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Tue, 3 Sep 2024 21:09:32 -0300 Subject: [PATCH 13/31] Nick: --- apps/api/src/controllers/v0/scrape.ts | 12 ++++-------- apps/api/src/controllers/v0/search.ts | 16 ++++------------ apps/api/src/controllers/v1/map.ts | 6 +++++- apps/api/src/controllers/v1/scrape.ts | 12 ++++-------- apps/api/src/main/runWebScraper.ts | 13 ++++--------- apps/api/src/services/billing/credit_billing.ts | 9 ++++----- 6 files changed, 25 insertions(+), 43 deletions(-) diff --git a/apps/api/src/controllers/v0/scrape.ts b/apps/api/src/controllers/v0/scrape.ts index 40df5021..bc91da18 100644 --- a/apps/api/src/controllers/v0/scrape.ts +++ b/apps/api/src/controllers/v0/scrape.ts @@ -244,14 +244,10 @@ export async function scrapeController(req: Request, res: Response) { } if (creditsToBeBilled > 0) { // billing for doc done on queue end, bill only for llm extraction - const billingResult = await billTeam(team_id, creditsToBeBilled); - if (!billingResult.success) { - return res.status(402).json({ - success: false, - error: - "Failed to bill team. 
Insufficient credits or subscription not found.", - }); - } + billTeam(team_id, creditsToBeBilled).catch(error => { + Logger.error(`Failed to bill team ${team_id} for ${creditsToBeBilled} credits: ${error}`); + // Optionally, you could notify an admin or add to a retry queue here + }); } } diff --git a/apps/api/src/controllers/v0/search.ts b/apps/api/src/controllers/v0/search.ts index 825abbe1..5ef2b767 100644 --- a/apps/api/src/controllers/v0/search.ts +++ b/apps/api/src/controllers/v0/search.ts @@ -54,18 +54,10 @@ export async function searchHelper( if (justSearch) { - const billingResult = await billTeam( - team_id, - res.length - ); - if (!billingResult.success) { - return { - success: false, - error: - "Failed to bill team. Insufficient credits or subscription not found.", - returnCode: 402, - }; - } + billTeam(team_id, res.length).catch(error => { + Logger.error(`Failed to bill team ${team_id} for ${res.length} credits: ${error}`); + // Optionally, you could notify an admin or add to a retry queue here + }); return { success: true, data: res, returnCode: 200 }; } diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts index 21e91840..4c94f041 100644 --- a/apps/api/src/controllers/v1/map.ts +++ b/apps/api/src/controllers/v1/map.ts @@ -18,6 +18,7 @@ import { fireEngineMap } from "../../search/fireEngine"; import { billTeam } from "../../services/billing/credit_billing"; import { logJob } from "../../services/logging/log_job"; import { performCosineSimilarity } from "../../lib/map-cosine"; +import { Logger } from "../../lib/logger"; configDotenv(); @@ -100,7 +101,10 @@ export async function mapController( // remove duplicates that could be due to http/https or www links = removeDuplicateUrls(links); - await billTeam(req.auth.team_id, 1); + billTeam(req.auth.team_id, 1).catch(error => { + Logger.error(`Failed to bill team ${req.auth.team_id} for 1 credit: ${error}`); + // Optionally, you could notify an admin or add to a retry queue here + }); const endTime = new Date().getTime(); const timeTakenInSeconds = (endTime - startTime) / 1000; diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts index 9fba1a45..0835cc2a 100644 --- a/apps/api/src/controllers/v1/scrape.ts +++ b/apps/api/src/controllers/v1/scrape.ts @@ -106,14 +106,10 @@ export async function scrapeController( creditsToBeBilled = 50; } - const billingResult = await billTeam(req.auth.team_id, creditsToBeBilled); - if (!billingResult.success) { - return res.status(402).json({ - success: false, - error: - "Failed to bill team. 
Insufficient credits or subscription not found.", - }); - } + billTeam(req.auth.team_id, creditsToBeBilled).catch(error => { + Logger.error(`Failed to bill team ${req.auth.team_id} for ${creditsToBeBilled} credits: ${error}`); + // Optionally, you could notify an admin or add to a retry queue here + }); if (!pageOptions || !pageOptions.includeRawHtml) { if (doc && doc.rawHtml) { diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts index 2268f9ed..cd199fa1 100644 --- a/apps/api/src/main/runWebScraper.ts +++ b/apps/api/src/main/runWebScraper.ts @@ -118,15 +118,10 @@ export async function runWebScraper({ : docs; if(is_scrape === false) { - const billingResult = await billTeam(team_id, filteredDocs.length); - if (!billingResult.success) { - // throw new Error("Failed to bill team, no subscription was found"); - return { - success: false, - message: "Failed to bill team, no subscription was found", - docs: [], - }; - } + billTeam(team_id, filteredDocs.length).catch(error => { + Logger.error(`Failed to bill team ${team_id} for ${filteredDocs.length} credits: ${error}`); + // Optionally, you could notify an admin or add to a retry queue here + }); } diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index 2cfea85a..ab00eab9 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -465,8 +465,8 @@ async function createCreditUsage({ subscription_id?: string; credits: number; }) { - const { data: credit_usage } = await supabase_service - .from("credit_usage") + await supabase_service + .from("credit_usage") .insert([ { team_id, @@ -474,8 +474,7 @@ async function createCreditUsage({ subscription_id: subscription_id || null, created_at: new Date(), }, - ]) - .select(); + ]); - return { success: true, credit_usage }; + return { success: true }; } From 653b76fe3dc13f7271260386a09120ac4823ae71 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Tue, 3 Sep 2024 23:33:29 -0300 Subject: [PATCH 14/31] Update README.md --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 89ed0127..3803716f 100644 --- a/README.md +++ b/README.md @@ -391,7 +391,7 @@ With LLM extraction, you can easily extract structured data from any URL. 
We sup from firecrawl.firecrawl import FirecrawlApp -app = FirecrawlApp(api_key="fc-YOUR_API_KEY", version="v0") +app = FirecrawlApp(api_key="fc-YOUR_API_KEY") class ArticleSchema(BaseModel): title: str @@ -466,8 +466,7 @@ import FirecrawlApp from "@mendable/firecrawl-js"; import { z } from "zod"; const app = new FirecrawlApp({ - apiKey: "fc-YOUR_API_KEY", - version: "v0" + apiKey: "fc-YOUR_API_KEY" }); // Define schema to extract contents into From 28df35382949f40e6eba41977c49f221aa0876e7 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:16:31 -0300 Subject: [PATCH 15/31] fix(cicd): wait and moved rust publish --- .github/workflows/fly.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/fly.yml b/.github/workflows/fly.yml index ba4a099e..0fd1c12f 100644 --- a/.github/workflows/fly.yml +++ b/.github/workflows/fly.yml @@ -57,6 +57,9 @@ jobs: run: npm run workers & working-directory: ./apps/api id: start_workers + - name: Wait for the application to be ready + run: | + sleep 10 - name: Run E2E tests run: | npm run test:prod @@ -338,6 +341,7 @@ jobs: build-and-publish-rust-sdk: name: Build and publish Rust SDK runs-on: ubuntu-latest + needs: deploy steps: - name: Checkout repository From d836ba67821235fb3363065d01fb36f0094044f9 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:35:56 -0300 Subject: [PATCH 16/31] added log to check response on cicd --- apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index dd7d4f16..40017d2b 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -32,6 +32,10 @@ describe("E2E Tests for v1 API Routes", () => { const response: ScrapeResponseRequestTest = await request(TEST_URL).post( "/v1/scrape" ); + console.log({ + response: response.body, + statusCode: response.statusCode, + }) expect(response.statusCode).toBe(401); }); From 74ac8915cd912b37e779f44bd428e550abda5b6d Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:41:38 -0300 Subject: [PATCH 17/31] details --- apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index 40017d2b..9d504f1f 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -34,6 +34,7 @@ describe("E2E Tests for v1 API Routes", () => { ); console.log({ response: response.body, + details: response.body.success == false ? response.body.details : null, statusCode: response.statusCode, }) expect(response.statusCode).toBe(401); From 57aa6d18525a34607730b926f3e014f1d9bbac1b Mon Sep 17 00:00:00 2001 From: Eric Ciarla <43451761+ericciarla@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:56:41 -0400 Subject: [PATCH 18/31] Update README.md --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 3803716f..63dd6ea5 100644 --- a/README.md +++ b/README.md @@ -14,10 +14,9 @@ GitHub Contributors - - Open Source + + Visit firecrawl.dev -
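[Note on the USE_DB_AUTHENTICATION checks that patches 01-02 tighten and patches 26-28 keep probing: process.env values are always strings (or undefined), so a bare truthiness test treats the literal string "false" as enabled. The patches therefore compare strictly against "true". A minimal sketch of the idiom; the envFlag helper is illustrative, not part of the codebase:]

    // Environment variables are strings: "false", "0", and "no" are all truthy.
    const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === "true";

    // Hypothetical helper, if the repeated checks were ever factored out:
    function envFlag(name: string): boolean {
      return process.env[name] === "true";
    }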

From ad950a6c9d641aaeb0550b702a64825c7c11838e Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 11:29:32 -0300 Subject: [PATCH 19/31] fixed controller res and tests --- .../__tests__/e2e_v1_withAuth/index.test.ts | 26 ++++++++----------- apps/api/src/routes/v1.ts | 24 +++++++++++------ 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index 9d504f1f..2d27462f 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -29,14 +29,10 @@ describe("E2E Tests for v1 API Routes", () => { describe("POST /v1/scrape", () => { it.concurrent("should require authorization", async () => { - const response: ScrapeResponseRequestTest = await request(TEST_URL).post( - "/v1/scrape" - ); - console.log({ - response: response.body, - details: response.body.success == false ? response.body.details : null, - statusCode: response.statusCode, - }) + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .send({ url: "https://firecrawl.dev"}) + expect(response.statusCode).toBe(401); }); @@ -456,9 +452,9 @@ describe("E2E Tests for v1 API Routes", () => { describe("POST /v1/map", () => { it.concurrent("should require authorization", async () => { - const response: ScrapeResponseRequestTest = await request(TEST_URL).post( - "/v1/map" - ); + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .send({ url: "https://firecrawl.dev" }); expect(response.statusCode).toBe(401); }); @@ -614,9 +610,9 @@ describe("POST /v1/map", () => { describe("POST /v1/crawl", () => { it.concurrent("should require authorization", async () => { - const response: ScrapeResponseRequestTest = await request(TEST_URL).post( - "/v1/crawl" - ); + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/crawl") + .send({ url: "https://firecrawl.dev" }); expect(response.statusCode).toBe(401); }); @@ -868,7 +864,7 @@ describe("GET /v1/crawl/:jobId", () => { .post("/v1/crawl") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") - .send({ url: "https://docs.mendable.ai" }); + .send({ url: "https://docs.firecrawl.dev" }); expect(crawlResponse.statusCode).toBe(200); let isCompleted = false; diff --git a/apps/api/src/routes/v1.ts b/apps/api/src/routes/v1.ts index 9dcbf111..daa9bf43 100644 --- a/apps/api/src/routes/v1.ts +++ b/apps/api/src/routes/v1.ts @@ -33,7 +33,9 @@ function checkCreditsMiddleware(minimum?: number): (req: RequestWithAuth, res: R const { success, message, remainingCredits } = await checkTeamCredits(req.auth.team_id, minimum); if (!success) { Logger.error(`Insufficient credits: ${JSON.stringify({ team_id: req.auth.team_id, minimum, remainingCredits })}`); - return res.status(402).json({ success: false, error: "Insufficient credits" }); + if (!res.headersSent) { + return res.status(402).json({ success: false, error: "Insufficient credits" }); + } } req.account = { remainingCredits } next(); @@ -52,7 +54,9 @@ export function authMiddleware(rateLimiterMode: RateLimiterMode): (req: RequestW ); if (!success) { - return res.status(status).json({ success: false, error }); + if (!res.headersSent) { + return res.status(status).json({ success: false, error }); + } } req.auth = { team_id, plan }; @@ -67,7 +71,9 @@ function idempotencyMiddleware(req: 
Request, res: Response, next: NextFunction) if (req.headers["x-idempotency-key"]) { const isIdempotencyValid = await validateIdempotencyKey(req); if (!isIdempotencyValid) { - return res.status(409).json({ success: false, error: "Idempotency key already used" }); + if (!res.headersSent) { + return res.status(409).json({ success: false, error: "Idempotency key already used" }); + } } createIdempotencyKey(req); } @@ -78,7 +84,9 @@ function idempotencyMiddleware(req: Request, res: Response, next: NextFunction) function blocklistMiddleware(req: Request, res: Response, next: NextFunction) { if (req.body.url && isUrlBlocked(req.body.url)) { - return res.status(403).json({ success: false, error: "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." }); + if (!res.headersSent) { + return res.status(403).json({ success: false, error: "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." }); + } } next(); } @@ -96,26 +104,26 @@ export const v1Router = express.Router(); v1Router.post( "/scrape", - blocklistMiddleware, authMiddleware(RateLimiterMode.Scrape), checkCreditsMiddleware(1), + blocklistMiddleware, wrap(scrapeController) ); v1Router.post( "/crawl", - blocklistMiddleware, authMiddleware(RateLimiterMode.Crawl), - idempotencyMiddleware, checkCreditsMiddleware(), + blocklistMiddleware, + idempotencyMiddleware, wrap(crawlController) ); v1Router.post( "/map", - blocklistMiddleware, authMiddleware(RateLimiterMode.Map), checkCreditsMiddleware(1), + blocklistMiddleware, wrap(mapController) ); From f98a8541c2e5de16733286482487b84922140db1 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 11:41:50 -0300 Subject: [PATCH 20/31] fix(cicd): added use_db_auth to deploy workflow --- .github/workflows/fly.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/fly.yml b/.github/workflows/fly.yml index 0fd1c12f..9209309f 100644 --- a/.github/workflows/fly.yml +++ b/.github/workflows/fly.yml @@ -28,6 +28,7 @@ env: NPM_TOKEN: ${{ secrets.NPM_TOKEN }} CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + USE_DB_AUTHENTICATION: ${{ secrets.USE_DB_AUTHENTICATION }} jobs: pre-deploy-e2e-tests: From a2a63e42cc69ef314ca89aca9eb9ea121d77af9c Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 4 Sep 2024 12:03:58 -0300 Subject: [PATCH 21/31] Rm print map --- apps/python-sdk/firecrawl/firecrawl.py | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index d65b0341..254f4c70 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -238,7 +238,6 @@ class FirecrawlApp: ) if response.status_code == 200: response = response.json() - print(response) if response['success'] and 'links' in response: return response['links'] else: From 364ba9f90172393bb3c88086cc6eed9edfceab83 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:05:02 -0300 Subject: [PATCH 22/31] fix(cicd): mendable->firecrawl and waitfor --- apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 10 +++++----- apps/api/src/__tests__/e2e_withAuth/index.test.ts | 10 ++++------ 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index 
2d27462f..e51b349d 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -390,7 +390,7 @@ describe("E2E Tests for v1 API Routes", () => { const scrapeRequest: ScrapeRequest = { url: "https://ycombinator.com/companies", formats: ["markdown"], - waitFor: 5000 + waitFor: 8000 }; const response: ScrapeResponseRequestTest = await request(TEST_URL) @@ -560,7 +560,9 @@ describe("POST /v1/map", () => { const links = response.body.links as unknown[]; expect(Array.isArray(links)).toBe(true); expect(links.length).toBeGreaterThan(0); - expect(links[0]).toContain("docs.firecrawl.dev"); + + const containsDocsFirecrawlDev = links.some((link: string) => link.includes("docs.firecrawl.dev")); + expect(containsDocsFirecrawlDev).toBe(true); }, 10000) it.concurrent("should return a successful response with a valid API key and search and not allowSubdomains and www", async () => { @@ -894,9 +896,7 @@ describe("GET /v1/crawl/:jobId", () => { expect(completedResponse.body.data[0]).not.toHaveProperty("content"); expect(completedResponse.body.data[0]).toHaveProperty("markdown"); expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].metadata.statusCode).toBe( - 200 - ); + expect(completedResponse.body.data[0].metadata.statusCode).toBe(200); expect( completedResponse.body.data[0].metadata.error ).toBeUndefined(); diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index 330f8130..26caf63e 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -659,7 +659,7 @@ describe("E2E Tests for v0 API Routes", () => { .post("/v0/crawl") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") - .send({ url: "https://mendable.ai/blog" }); + .send({ url: "https://firecrawl.dev/blog" }); expect(crawlResponse.statusCode).toBe(200); let isCompleted = false; @@ -689,10 +689,8 @@ describe("E2E Tests for v0 API Routes", () => { expect(completedResponse.body.data[0]).toHaveProperty("content"); expect(completedResponse.body.data[0]).toHaveProperty("markdown"); expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].content).toContain("Mendable"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe( - 200 - ); + expect(completedResponse.body.data[0].content).toContain("Firecrawl"); + expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); expect( completedResponse.body.data[0].metadata.pageError ).toBeUndefined(); @@ -701,7 +699,7 @@ describe("E2E Tests for v0 API Routes", () => { (doc) => doc.metadata && doc.metadata.sourceURL && - doc.metadata.sourceURL.includes("mendable.ai/blog") + doc.metadata.sourceURL.includes("firecrawl.dev/blog") ); expect(childrenLinks.length).toBe(completedResponse.body.data.length); From 1eb993a93b3e00ab6685e5326c2fb41797cfd8ff Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 4 Sep 2024 12:09:28 -0300 Subject: [PATCH 23/31] Update __init__.py --- apps/python-sdk/firecrawl/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py index 59c5fe35..f178cd61 100644 --- a/apps/python-sdk/firecrawl/__init__.py +++ b/apps/python-sdk/firecrawl/__init__.py @@ -13,7 +13,7 @@ import os from .firecrawl import FirecrawlApp -__version__ = "1.2.2" 
+__version__ = "1.2.3" # Define the logger for the Firecrawl project logger: logging.Logger = logging.getLogger("firecrawl") From 3f462eabe9c976c88b5c27c3b9743647cc8f3244 Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:27:46 -0300 Subject: [PATCH 24/31] fix(cicd): --- apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index e51b349d..880d34a1 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -535,7 +535,9 @@ describe("POST /v1/map", () => { const links = response.body.links as unknown[]; expect(Array.isArray(links)).toBe(true); expect(links.length).toBeGreaterThan(0); - expect(links[0]).toContain("docs.firecrawl.dev"); + + const containsDocsFirecrawlDev = links.some((link: string) => link.includes("docs.firecrawl.dev")); + expect(containsDocsFirecrawlDev).toBe(true); }); it.concurrent("should return a successful response with a valid API key and search and allowSubdomains and www", async () => { From 5ecb2436932f00529a0f4116a388f1d874dff8d7 Mon Sep 17 00:00:00 2001 From: Nicolas Date: Wed, 4 Sep 2024 15:19:45 -0300 Subject: [PATCH 25/31] Nick: --- .../src/services/billing/credit_billing.ts | 143 +++++++++++++----- apps/api/src/services/queue-jobs.ts | 2 +- 2 files changed, 103 insertions(+), 42 deletions(-) diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index ab00eab9..9ea0435e 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -5,7 +5,7 @@ import { supabase_service } from "../supabase"; import { Logger } from "../../lib/logger"; import { getValue, setValue } from "../redis"; import { redlock } from "../redlock"; - +import * as Sentry from "@sentry/node"; const FREE_CREDITS = 500; @@ -176,9 +176,24 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { return { success: true, message: "Preview team, no credits used", remainingCredits: Infinity }; } - // Retrieve the team's active subscription and check for available coupons concurrently - const [{ data: subscription, error: subscriptionError }, { data: coupons }] = - await Promise.all([ + + let cacheKeySubscription = `subscription_${team_id}`; + let cacheKeyCoupons = `coupons_${team_id}`; + + // Try to get data from cache first + const [cachedSubscription, cachedCoupons] = await Promise.all([ + getValue(cacheKeySubscription), + getValue(cacheKeyCoupons) + ]); + + let subscription, subscriptionError, coupons; + + if (cachedSubscription && cachedCoupons) { + subscription = JSON.parse(cachedSubscription); + coupons = JSON.parse(cachedCoupons); + } else { + // If not in cache, retrieve from database + const [subscriptionResult, couponsResult] = await Promise.all([ supabase_service .from("subscriptions") .select("id, price_id, current_period_start, current_period_end") @@ -192,6 +207,18 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { .eq("status", "active"), ]); + subscription = subscriptionResult.data; + subscriptionError = subscriptionResult.error; + coupons = couponsResult.data; + + // Cache the results for a minute, sub can be null and that's fine + await setValue(cacheKeySubscription, JSON.stringify(subscription), 60); // 
Cache for 1 minute, even if null + + if (coupons) { + await setValue(cacheKeyCoupons, JSON.stringify(coupons), 60); // Cache for 1 minute + } + } + let couponCredits = 0; if (coupons && coupons.length > 0) { couponCredits = coupons.reduce( @@ -212,41 +239,54 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { let creditUsages; let creditUsageError; - let retries = 0; - const maxRetries = 3; - const retryInterval = 2000; // 2 seconds + let totalCreditsUsed = 0; + const cacheKeyCreditUsage = `credit_usage_${team_id}`; - while (retries < maxRetries) { - const result = await supabase_service - .from("credit_usage") - .select("credits_used") - .is("subscription_id", null) - .eq("team_id", team_id); + // Try to get credit usage from cache + const cachedCreditUsage = await getValue(cacheKeyCreditUsage); - creditUsages = result.data; - creditUsageError = result.error; + if (cachedCreditUsage) { + totalCreditsUsed = parseInt(cachedCreditUsage); + } else { + let retries = 0; + const maxRetries = 3; + const retryInterval = 2000; // 2 seconds - if (!creditUsageError) { - break; + while (retries < maxRetries) { + const result = await supabase_service + .from("credit_usage") + .select("credits_used") + .is("subscription_id", null) + .eq("team_id", team_id); + + creditUsages = result.data; + creditUsageError = result.error; + + if (!creditUsageError) { + break; + } + + retries++; + if (retries < maxRetries) { + await new Promise(resolve => setTimeout(resolve, retryInterval)); + } } - retries++; - if (retries < maxRetries) { - await new Promise(resolve => setTimeout(resolve, retryInterval)); + if (creditUsageError) { + Logger.error(`Credit usage error after ${maxRetries} attempts: ${creditUsageError}`); + throw new Error( + `Failed to retrieve credit usage for team_id: ${team_id}` + ); } - } - if (creditUsageError) { - Logger.error(`Credit usage error after ${maxRetries} attempts: ${creditUsageError}`); - throw new Error( - `Failed to retrieve credit usage for team_id: ${team_id}` + totalCreditsUsed = creditUsages.reduce( + (acc, usage) => acc + usage.credits_used, + 0 ); - } - const totalCreditsUsed = creditUsages.reduce( - (acc, usage) => acc + usage.credits_used, - 0 - ); + // Cache the result for 30 seconds + await setValue(cacheKeyCreditUsage, totalCreditsUsed.toString(), 30); + } Logger.info(`totalCreditsUsed: ${totalCreditsUsed}`); @@ -312,7 +352,7 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { if (creditUsages && creditUsages.length > 0) { totalCreditsUsed = creditUsages[0].total_credits_used; - await setValue(cacheKey, totalCreditsUsed.toString(), 1800); // Cache for 30 minutes + await setValue(cacheKey, totalCreditsUsed.toString(), 500); // Cache for 8 minutes // Logger.info(`Cache set for credit usage: ${totalCreditsUsed}`); } } @@ -325,17 +365,38 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { // Adjust total credits used by subtracting coupon value const adjustedCreditsUsed = Math.max(0, totalCreditsUsed - couponCredits); - // Get the price details - const { data: price, error: priceError } = await supabase_service - .from("prices") - .select("credits") - .eq("id", subscription.price_id) - .single(); - if (priceError) { - throw new Error( - `Failed to retrieve price for price_id: ${subscription.price_id}` - ); + // Get the price details from cache or database + const priceCacheKey = `price_${subscription.price_id}`; + let price; + + try { + const cachedPrice = await getValue(priceCacheKey); + if 
(cachedPrice) { + price = JSON.parse(cachedPrice); + } else { + const { data, error: priceError } = await supabase_service + .from("prices") + .select("credits") + .eq("id", subscription.price_id) + .single(); + + if (priceError) { + throw new Error( + `Failed to retrieve price for price_id: ${subscription.price_id}` + ); + } + + price = data; + // There are only 21 records, so this is super fine + // Cache the price for a long time (e.g., 1 day) + await setValue(priceCacheKey, JSON.stringify(price), 86400); + } + } catch (error) { + Logger.error(`Error retrieving or caching price: ${error}`); + Sentry.captureException(error); + // If errors, just assume it's a big number so user don't get an error + price = { credits: 1000000 }; } const creditLimit = price.credits; diff --git a/apps/api/src/services/queue-jobs.ts b/apps/api/src/services/queue-jobs.ts index 941b571d..7a698772 100644 --- a/apps/api/src/services/queue-jobs.ts +++ b/apps/api/src/services/queue-jobs.ts @@ -67,6 +67,6 @@ export function waitForJob(jobId: string, timeout: number) { reject((await getScrapeQueue().getJob(jobId)).failedReason); } } - }, 1000); + }, 500); }) } From cb8571abad6d0388daf9b66e7db76a22116df6df Mon Sep 17 00:00:00 2001 From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com> Date: Wed, 4 Sep 2024 15:57:57 -0300 Subject: [PATCH 26/31] fix: enforced dotenv config --- apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 4 ++-- apps/api/src/controllers/v0/crawl-cancel.ts | 2 ++ apps/api/src/controllers/v0/crawl-status.ts | 2 ++ apps/api/src/controllers/v1/crawl-cancel.ts | 2 ++ apps/api/src/controllers/v1/crawl-status.ts | 2 ++ apps/api/src/lib/logger.ts | 3 +++ apps/api/src/lib/scrape-events.ts | 2 ++ apps/api/src/lib/withAuth.ts | 2 ++ apps/api/src/main/runWebScraper.ts | 2 ++ apps/api/src/services/logging/crawl_log.ts | 3 ++- apps/api/src/services/logging/log_job.ts | 2 ++ apps/api/src/services/logging/scrape_log.ts | 2 ++ apps/api/src/services/queue-worker.ts | 2 ++ apps/api/src/services/supabase.ts | 2 ++ apps/api/src/services/webhook.ts | 2 ++ apps/test-suite/utils/supabase.ts | 3 ++- 16 files changed, 33 insertions(+), 4 deletions(-) diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index 880d34a1..913f9408 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -1,11 +1,11 @@ import request from "supertest"; -import dotenv from "dotenv"; +import { configDotenv } from "dotenv"; import { ScrapeRequest, ScrapeResponseRequestTest, } from "../../controllers/v1/types"; -dotenv.config(); +configDotenv(); const TEST_URL = "http://127.0.0.1:3002"; describe("E2E Tests for v1 API Routes", () => { diff --git a/apps/api/src/controllers/v0/crawl-cancel.ts b/apps/api/src/controllers/v0/crawl-cancel.ts index bf1c2d0a..efcd454a 100644 --- a/apps/api/src/controllers/v0/crawl-cancel.ts +++ b/apps/api/src/controllers/v0/crawl-cancel.ts @@ -5,6 +5,8 @@ import { supabase_service } from "../../../src/services/supabase"; import { Logger } from "../../../src/lib/logger"; import { getCrawl, saveCrawl } from "../../../src/lib/crawl-redis"; import * as Sentry from "@sentry/node"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function crawlCancelController(req: Request, res: Response) { try { diff --git a/apps/api/src/controllers/v0/crawl-status.ts b/apps/api/src/controllers/v0/crawl-status.ts index b0649cd0..a3f3f16f 100644 --- 
a/apps/api/src/controllers/v0/crawl-status.ts +++ b/apps/api/src/controllers/v0/crawl-status.ts @@ -6,6 +6,8 @@ import { Logger } from "../../../src/lib/logger"; import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis"; import { supabaseGetJobsById } from "../../../src/lib/supabase-jobs"; import * as Sentry from "@sentry/node"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function getJobs(ids: string[]) { const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x); diff --git a/apps/api/src/controllers/v1/crawl-cancel.ts b/apps/api/src/controllers/v1/crawl-cancel.ts index 06a5b26e..21fc7cf9 100644 --- a/apps/api/src/controllers/v1/crawl-cancel.ts +++ b/apps/api/src/controllers/v1/crawl-cancel.ts @@ -5,6 +5,8 @@ import { supabase_service } from "../../services/supabase"; import { Logger } from "../../lib/logger"; import { getCrawl, saveCrawl } from "../../lib/crawl-redis"; import * as Sentry from "@sentry/node"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function crawlCancelController(req: Request, res: Response) { try { diff --git a/apps/api/src/controllers/v1/crawl-status.ts b/apps/api/src/controllers/v1/crawl-status.ts index 845f616c..05144a9b 100644 --- a/apps/api/src/controllers/v1/crawl-status.ts +++ b/apps/api/src/controllers/v1/crawl-status.ts @@ -3,6 +3,8 @@ import { CrawlStatusParams, CrawlStatusResponse, ErrorResponse, legacyDocumentCo import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength } from "../../lib/crawl-redis"; import { getScrapeQueue } from "../../services/queue-service"; import { supabaseGetJobById, supabaseGetJobsById } from "../../lib/supabase-jobs"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function getJob(id: string) { const job = await getScrapeQueue().getJob(id); diff --git a/apps/api/src/lib/logger.ts b/apps/api/src/lib/logger.ts index fb0468c2..cb8b4119 100644 --- a/apps/api/src/lib/logger.ts +++ b/apps/api/src/lib/logger.ts @@ -1,3 +1,6 @@ +import { configDotenv } from "dotenv"; +configDotenv(); + enum LogLevel { NONE = 'NONE', // No logs will be output. ERROR = 'ERROR', // For logging error messages that indicate a failure in a specific operation. 
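[Note on patch 26: it calls configDotenv() at the top of every module that touches process.env, rather than relying on a single dotenv.config() in the entrypoint. Node evaluates a module the first time it is imported, so a file that reads the environment during module evaluation (as logger.ts presumably does for its log level) can run before the entrypoint's dotenv call has populated process.env. A simplified sketch of the hazard, with hypothetical file and variable names; dotenv is safe to call repeatedly and does not overwrite variables that are already set:]

    // config-reader.ts (hypothetical): reads the environment at import time.
    export const LOG_LEVEL = process.env.LOGGING_LEVEL ?? "INFO";

    // entrypoint.ts (hypothetical): imports are hoisted, so config-reader.ts
    // has already been evaluated by the time configDotenv() runs below.
    import { LOG_LEVEL } from "./config-reader";
    import { configDotenv } from "dotenv";
    configDotenv(); // too late: LOG_LEVEL was computed without the .env values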
diff --git a/apps/api/src/lib/scrape-events.ts b/apps/api/src/lib/scrape-events.ts index ed011b78..ad70dfef 100644 --- a/apps/api/src/lib/scrape-events.ts +++ b/apps/api/src/lib/scrape-events.ts @@ -2,6 +2,8 @@ import { Job } from "bullmq"; import type { baseScrapers } from "../scraper/WebScraper/single_url"; import { supabase_service as supabase } from "../services/supabase"; import { Logger } from "./logger"; +import { configDotenv } from "dotenv"; +configDotenv(); export type ScrapeErrorEvent = { type: "error", diff --git a/apps/api/src/lib/withAuth.ts b/apps/api/src/lib/withAuth.ts index 90cfb449..b45b8973 100644 --- a/apps/api/src/lib/withAuth.ts +++ b/apps/api/src/lib/withAuth.ts @@ -1,6 +1,8 @@ import { AuthResponse } from "../../src/types"; import { Logger } from "./logger"; import * as Sentry from "@sentry/node"; +import { configDotenv } from "dotenv"; +configDotenv(); let warningCount = 0; diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts index cd199fa1..f67a1cd0 100644 --- a/apps/api/src/main/runWebScraper.ts +++ b/apps/api/src/main/runWebScraper.ts @@ -12,6 +12,8 @@ import { Document } from "../lib/entities"; import { supabase_service } from "../services/supabase"; import { Logger } from "../lib/logger"; import { ScrapeEvents } from "../lib/scrape-events"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function startWebScraperPipeline({ job, diff --git a/apps/api/src/services/logging/crawl_log.ts b/apps/api/src/services/logging/crawl_log.ts index f19b0297..3850e05b 100644 --- a/apps/api/src/services/logging/crawl_log.ts +++ b/apps/api/src/services/logging/crawl_log.ts @@ -1,6 +1,7 @@ import { supabase_service } from "../supabase"; import { Logger } from "../../../src/lib/logger"; -import "dotenv/config"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function logCrawl(job_id: string, team_id: string) { const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; diff --git a/apps/api/src/services/logging/log_job.ts b/apps/api/src/services/logging/log_job.ts index d4494f09..4d8ee014 100644 --- a/apps/api/src/services/logging/log_job.ts +++ b/apps/api/src/services/logging/log_job.ts @@ -4,6 +4,8 @@ import { FirecrawlJob } from "../../types"; import { posthog } from "../posthog"; import "dotenv/config"; import { Logger } from "../../lib/logger"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function logJob(job: FirecrawlJob) { try { diff --git a/apps/api/src/services/logging/scrape_log.ts b/apps/api/src/services/logging/scrape_log.ts index 30d8fd1e..fbe41653 100644 --- a/apps/api/src/services/logging/scrape_log.ts +++ b/apps/api/src/services/logging/scrape_log.ts @@ -3,6 +3,8 @@ import { ScrapeLog } from "../../types"; import { supabase_service } from "../supabase"; import { PageOptions } from "../../lib/entities"; import { Logger } from "../../lib/logger"; +import { configDotenv } from "dotenv"; +configDotenv(); export async function logScrape( scrapeLog: ScrapeLog, diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts index 6488759f..ad0e4ad5 100644 --- a/apps/api/src/services/queue-worker.ts +++ b/apps/api/src/services/queue-worker.ts @@ -36,6 +36,8 @@ import { } from "../../src/lib/job-priority"; import { PlanType } from "../types"; import { getJobs } from "../../src/controllers/v1/crawl-status"; +import { configDotenv } from "dotenv"; +configDotenv(); if (process.env.ENV === "production") { initSDK({ diff --git 
index 414d1925..7636717e 100644
--- a/apps/api/src/services/supabase.ts
+++ b/apps/api/src/services/supabase.ts
@@ -1,5 +1,7 @@
 import { createClient, SupabaseClient } from "@supabase/supabase-js";
 import { Logger } from "../lib/logger";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 // SupabaseService class initializes the Supabase client conditionally based on environment variables.
 class SupabaseService {
diff --git a/apps/api/src/services/webhook.ts b/apps/api/src/services/webhook.ts
index 56dd5c58..06e5649d 100644
--- a/apps/api/src/services/webhook.ts
+++ b/apps/api/src/services/webhook.ts
@@ -3,6 +3,8 @@ import { legacyDocumentConverter } from "../../src/controllers/v1/types";
 import { Logger } from "../../src/lib/logger";
 import { supabase_service } from "./supabase";
 import { WebhookEventType } from "../types";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 export const callWebhook = async (
   teamId: string,
diff --git a/apps/test-suite/utils/supabase.ts b/apps/test-suite/utils/supabase.ts
index 3e66a991..a1549e24 100644
--- a/apps/test-suite/utils/supabase.ts
+++ b/apps/test-suite/utils/supabase.ts
@@ -1,5 +1,6 @@
 import { createClient, SupabaseClient } from "@supabase/supabase-js";
-import "dotenv/config";
+import { configDotenv } from "dotenv";
+configDotenv();
 
 // SupabaseService class initializes the Supabase client conditionally based on environment variables.
 class SupabaseService {

From 78edf13ec6f52c12956b576712c4ca663a5d16ad Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Wed, 4 Sep 2024 16:31:42 -0300
Subject: [PATCH 27/31] test: usedbauth envs wth

---
 apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
index 913f9408..5631adf0 100644
--- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
@@ -22,6 +22,13 @@ describe("E2E Tests for v1 API Routes", () => {
     const response: ScrapeResponseRequestTest = await request(TEST_URL).get(
       "/is-production"
     );
+
+    console.log('process.env.USE_DB_AUTHENTICATION', process.env.USE_DB_AUTHENTICATION);
+    console.log('?', process.env.USE_DB_AUTHENTICATION === 'true');
+    const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true';
+    console.log('useDbAuthentication', useDbAuthentication);
+    console.log('!useDbAuthentication', !useDbAuthentication);
+
     expect(response.statusCode).toBe(200);
     expect(response.body).toHaveProperty("isProduction");
   });

From 85b824e122a095595b9f188902eb771590392f06 Mon Sep 17 00:00:00 2001
From: rafaelsideguide <150964962+rafaelsideguide@users.noreply.github.com>
Date: Wed, 4 Sep 2024 16:35:32 -0300
Subject: [PATCH 28/31] test: what about false false?

---
 apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
index 5631adf0..8aabf748 100644
--- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
+++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts
@@ -26,7 +26,7 @@ describe("E2E Tests for v1 API Routes", () => {
     console.log('process.env.USE_DB_AUTHENTICATION', process.env.USE_DB_AUTHENTICATION);
     console.log('?', process.env.USE_DB_AUTHENTICATION === 'true');
     const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true';
-    console.log('useDbAuthentication', useDbAuthentication);
+    console.log('!!useDbAuthentication', !!useDbAuthentication);
     console.log('!useDbAuthentication', !useDbAuthentication);
 
     expect(response.statusCode).toBe(200);

From 28c5635502ebfdc98852ecf576cf7b9aa27f48e8 Mon Sep 17 00:00:00 2001
From: Nicolas
Date: Wed, 4 Sep 2024 16:45:56 -0300
Subject: [PATCH 29/31] Update ci.yml

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b2e42e4a..ff22858b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,6 +28,7 @@ env:
   HYPERDX_API_KEY: ${{ secrets.HYPERDX_API_KEY }}
   HDX_NODE_BETA_MODE: 1
   FIRE_ENGINE_BETA_URL: ${{ secrets.FIRE_ENGINE_BETA_URL }}
+  USE_DB_AUTHENTICATION: ${{ secrets.USE_DB_AUTHENTICATION }}
 
 
 jobs:

From a0113dac3753725500d659f632bfe77a67e8e191 Mon Sep 17 00:00:00 2001
From: Nicolas
Date: Wed, 4 Sep 2024 16:54:20 -0300
Subject: [PATCH 30/31] Update credit_billing.ts

---
 apps/api/src/services/billing/credit_billing.ts | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts
index 9ea0435e..53031de9 100644
--- a/apps/api/src/services/billing/credit_billing.ts
+++ b/apps/api/src/services/billing/credit_billing.ts
@@ -213,10 +213,8 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) {
 
     // Cache the results for a minute, sub can be null and that's fine
     await setValue(cacheKeySubscription, JSON.stringify(subscription), 60); // Cache for 1 minute, even if null
-
-    if (coupons) {
-      await setValue(cacheKeyCoupons, JSON.stringify(coupons), 60); // Cache for 1 minute
-    }
+    await setValue(cacheKeyCoupons, JSON.stringify(coupons), 60); // Cache for 1 minute
+
   }
 
   let couponCredits = 0;

From 82cb80c8170b299c83cf954b94c6d9c30c2166c0 Mon Sep 17 00:00:00 2001
From: Nicolas
Date: Wed, 4 Sep 2024 23:46:18 -0300
Subject: [PATCH 31/31] Update map.ts

---
 apps/api/src/controllers/v1/map.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts
index 4c94f041..e6abd9ae 100644
--- a/apps/api/src/controllers/v1/map.ts
+++ b/apps/api/src/controllers/v1/map.ts
@@ -62,8 +62,8 @@ export async function mapController(
     : `site:${req.body.url}`; // www. seems to exclude subdomains in some cases
 
   const mapResults = await fireEngineMap(mapUrl, {
-    // limit to 50 results (beta)
-    numResults: Math.min(limit, 50),
+    // limit to 100 results (beta)
+    numResults: Math.min(limit, 100),
   });
 
   if (mapResults.length > 0) {