Nick: improvements

commit: 614c073af0
parent: f473793ba3
File 1: the module that defines supaAuthenticateUser

@@ -1,5 +1,5 @@
 import { parseApi } from "../../src/lib/parseApi";
-import { getRateLimiter, crawlRateLimit, scrapeRateLimit } from "../../src/services/rate-limiter";
+import { getRateLimiter, } from "../../src/services/rate-limiter";
 import { AuthResponse, RateLimiterMode } from "../../src/types";
 import { supabase_service } from "../../src/services/supabase";
 import { withAuth } from "../../src/lib/withAuth";
@@ -68,7 +68,7 @@ export async function supaAuthenticateUser(
   if (error) {
     console.error('Error fetching key and price_id:', error);
   } else {
-    console.log('Key and Price ID:', data);
+    // console.log('Key and Price ID:', data);
   }

   if (error || !data || data.length === 0) {
@@ -79,20 +79,27 @@ export async function supaAuthenticateUser(
     };
   }


+  subscriptionData = {
+    team_id: data[0].team_id,
+    plan: getPlanByPriceId(data[0].price_id)
+  }
   switch (mode) {
     case RateLimiterMode.Crawl:
-      rateLimiter = crawlRateLimit(subscriptionData.plan);
+      rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token, subscriptionData.plan);
       break;
     case RateLimiterMode.Scrape:
-      rateLimiter = scrapeRateLimit(subscriptionData.plan);
+      rateLimiter = getRateLimiter(RateLimiterMode.Scrape, token, subscriptionData.plan);
       break;
     case RateLimiterMode.CrawlStatus:
       rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token);
       break;
+    case RateLimiterMode.Search:
+      rateLimiter = getRateLimiter(RateLimiterMode.Search, token);
+      break;
     case RateLimiterMode.Preview:
       rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
       break;
     default:
       rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token);
       break;
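The diff above only selects a limiter; it does not show how the limiter is consumed. The following is a minimal sketch, assuming rate-limiter-flexible's consume() API (which RateLimiterRedis provides) and a made-up helper name; import paths are written relative to the controller above.

import { getRateLimiter } from "../../src/services/rate-limiter";
import { RateLimiterMode } from "../../src/types";

// Hypothetical helper: resolve the limiter for a mode/plan and spend one point.
export async function enforceRateLimit(
  mode: RateLimiterMode,
  token: string,
  plan?: string
): Promise<boolean> {
  const rateLimiter = getRateLimiter(mode, token, plan);
  try {
    await rateLimiter.consume(token); // deduct one point for this token in the 60 s window
    return true;                      // request may proceed
  } catch {
    return false;                     // quota exhausted; the caller would respond with 429
  }
}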
File 2: the rate limiter service (src/services/rate-limiter)

@@ -10,6 +10,10 @@ const MAX_SCRAPES_PER_MINUTE_STARTER = 10;
 const MAX_SCRAPES_PER_MINUTE_STANDARD = 15;
 const MAX_SCRAPES_PER_MINUTE_SCALE = 30;

+const MAX_SEARCHES_PER_MINUTE_STARTER = 10;
+const MAX_SEARCHES_PER_MINUTE_STANDARD = 15;
+const MAX_SEARCHES_PER_MINUTE_SCALE = 30;
+
 const MAX_REQUESTS_PER_MINUTE_PREVIEW = 5;
 const MAX_REQUESTS_PER_MINUTE_ACCOUNT = 20;
 const MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS = 120;
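For orientation, the new search constants map each plan to a points budget inside a fixed 60-second window. A sketch of that implied mapping, assuming the plan strings used elsewhere in this diff and treating "starter" as the fallback tier (the hard-coded branches in the hunks below are the commit's actual approach):

// Sketch only: plan-to-points mapping implied by the constants above.
const SEARCH_POINTS_PER_MINUTE: Record<string, number> = {
  starter: 10,   // MAX_SEARCHES_PER_MINUTE_STARTER
  standard: 15,  // MAX_SEARCHES_PER_MINUTE_STANDARD
  scale: 30,     // MAX_SEARCHES_PER_MINUTE_SCALE
};

// Unknown or missing plans fall back to the starter budget.
const searchPointsFor = (plan?: string): number =>
  SEARCH_POINTS_PER_MINUTE[plan ?? "starter"] ?? SEARCH_POINTS_PER_MINUTE.starter;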
@@ -48,7 +52,17 @@ export const testSuiteRateLimiter = new RateLimiterRedis({
 });


-export function crawlRateLimit (plan: string){
+export function getRateLimiter(mode: RateLimiterMode, token: string, plan?: string){
+  // Special test suite case. TODO: Change this later.
+  if (token.includes("5089cefa58")){
+    return testSuiteRateLimiter;
+  }
+  switch (mode) {
+    case RateLimiterMode.Preview:
+      return previewRateLimiter;
+    case RateLimiterMode.CrawlStatus:
+      return crawlStatusRateLimiter;
+    case RateLimiterMode.Crawl:
       if (plan === "standard"){
         return new RateLimiterRedis({
           storeClient: redisClient,
@@ -70,9 +84,7 @@ export function crawlRateLimit (plan: string){
           points: MAX_CRAWLS_PER_MINUTE_STARTER,
           duration: 60, // Duration in seconds
         });
-}
-
-export function scrapeRateLimit (plan: string){
+    case RateLimiterMode.Scrape:
       if (plan === "standard"){
         return new RateLimiterRedis({
           storeClient: redisClient,
@@ -94,18 +106,28 @@ export function scrapeRateLimit (plan: string){
           points: MAX_SCRAPES_PER_MINUTE_STARTER,
           duration: 60, // Duration in seconds
         });
-}
-
-export function getRateLimiter(mode: RateLimiterMode, token: string){
-  // Special test suite case. TODO: Change this later.
-  if (token.includes("5089cefa58")){
-    return testSuiteRateLimiter;
+    case RateLimiterMode.Search:
+      if (plan === "standard"){
+        return new RateLimiterRedis({
+          storeClient: redisClient,
+          keyPrefix: "search-standard",
+          points: MAX_SEARCHES_PER_MINUTE_STANDARD,
+          duration: 60, // Duration in seconds
+        });
+      } else if (plan === "scale"){
+        return new RateLimiterRedis({
+          storeClient: redisClient,
+          keyPrefix: "search-scale",
+          points: MAX_SEARCHES_PER_MINUTE_SCALE,
+          duration: 60, // Duration in seconds
+        });
+      }
-  switch (mode) {
-    case RateLimiterMode.Preview:
-      return previewRateLimiter;
-    case RateLimiterMode.CrawlStatus:
-      return crawlStatusRateLimiter;
+      return new RateLimiterRedis({
+        storeClient: redisClient,
+        keyPrefix: "search-starter",
+        points: MAX_SEARCHES_PER_MINUTE_STARTER,
+        duration: 60, // Duration in seconds
+      });
     default:
       return serverRateLimiter;
   }
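A quick usage sketch of the consolidated signature (illustrative, not part of the commit), assuming the RateLimiterMode enum and plan strings shown above; the token values and import paths are made up for the example.

import { getRateLimiter } from "./rate-limiter";
import { RateLimiterMode } from "../types";

// Tokens containing "5089cefa58" short-circuit to testSuiteRateLimiter.
const testLimiter = getRateLimiter(RateLimiterMode.Scrape, "tok-5089cefa58-example");

// A "scale"-plan search token gets the search-scale limiter (30 points per 60 s).
const searchLimiter = getRateLimiter(RateLimiterMode.Search, "some-team-token", "scale");

// Modes without a per-plan branch (Preview, CrawlStatus) ignore the plan argument,
// and any unmatched mode falls through to serverRateLimiter.
const previewLimiter = getRateLimiter(RateLimiterMode.Preview, "some-team-token");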