mirror of https://github.com/mendableai/firecrawl.git
synced 2024-11-16 03:32:22 +08:00

Merge pull request #216 from mendableai/nsc/new-pricing

feat: New pricing/limits changes

This commit is contained in: commit 0c115c6181

@@ -31,6 +31,13 @@ POSTHOG_HOST= # set if you'd like to send posthog events like job logs
 STRIPE_PRICE_ID_STANDARD=
 STRIPE_PRICE_ID_SCALE=
+STRIPE_PRICE_ID_STARTER=
+STRIPE_PRICE_ID_HOBBY=
+STRIPE_PRICE_ID_HOBBY_YEARLY=
+STRIPE_PRICE_ID_STANDARD_NEW=
+STRIPE_PRICE_ID_STANDARD_NEW_YEARLY=
+STRIPE_PRICE_ID_GROWTH=
+STRIPE_PRICE_ID_GROWTH_YEARLY=
 
 HYPERDX_API_KEY=
 HDX_NODE_BETA_MODE=1

@@ -1004,7 +1004,7 @@ describe("E2E Tests for API Routes", () => {
   describe("Rate Limiter", () => {
     it("should return 429 when rate limit is exceeded for preview token", async () => {
-      for (let i = 0; i < 5; i++) {
+      for (let i = 0; i < 4; i++) {
        const response = await request(TEST_URL)
          .post("/v0/scrape")
          .set("Authorization", `Bearer this_is_just_a_preview_token`)

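The only change here is the warm-up loop shrinking from 5 to 4 iterations, presumably tuned to the preview limiter's budget (RATE_LIMITS.preview is 5 points per 60 s in this PR) together with the other requests the suite makes under the same token. The assertion lines sit outside this hunk; a minimal sketch of the enclosing pattern, with illustrative payloads and an assumed TEST_URL:

    // Sketch only: the payload, URL, and exact counts are illustrative, not the repo's code.
    import request from "supertest";

    const TEST_URL = "http://127.0.0.1:3002"; // assumed local API under test

    it("should return 429 when rate limit is exceeded for preview token", async () => {
      for (let i = 0; i < 4; i++) {
        await request(TEST_URL)
          .post("/v0/scrape")
          .set("Authorization", `Bearer this_is_just_a_preview_token`)
          .set("Content-Type", "application/json")
          .send({ url: "https://example.com" });
      }
      // One more request should now be rejected by the preview limiter.
      const response = await request(TEST_URL)
        .post("/v0/scrape")
        .set("Authorization", `Bearer this_is_just_a_preview_token`)
        .set("Content-Type", "application/json")
        .send({ url: "https://example.com" });
      expect(response.statusCode).toBe(429);
    });
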
@@ -29,6 +29,7 @@ export async function supaAuthenticateUser(
   team_id?: string;
   error?: string;
   status?: number;
+  plan?: string;
 }> {
   const authHeader = req.headers.authorization;
   if (!authHeader) {

@@ -104,12 +105,13 @@ export async function supaAuthenticateUser(
     case RateLimiterMode.Scrape:
       rateLimiter = getRateLimiter(RateLimiterMode.Scrape, token, subscriptionData.plan);
       break;
+    case RateLimiterMode.Search:
+      rateLimiter = getRateLimiter(RateLimiterMode.Search, token, subscriptionData.plan);
+      break;
     case RateLimiterMode.CrawlStatus:
       rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token);
       break;
-    case RateLimiterMode.Search:
-      rateLimiter = getRateLimiter(RateLimiterMode.Search, token);
-      break;
+
     case RateLimiterMode.Preview:
       rateLimiter = getRateLimiter(RateLimiterMode.Preview, token);
       break;

@@ -172,16 +174,24 @@ export async function supaAuthenticateUser(
     subscriptionData = data[0];
   }
 
-  return { success: true, team_id: subscriptionData.team_id };
+  return { success: true, team_id: subscriptionData.team_id, plan: subscriptionData.plan ?? ""};
 }
 
 function getPlanByPriceId(price_id: string) {
   switch (price_id) {
+    case process.env.STRIPE_PRICE_ID_STARTER:
+      return 'starter';
     case process.env.STRIPE_PRICE_ID_STANDARD:
       return 'standard';
     case process.env.STRIPE_PRICE_ID_SCALE:
       return 'scale';
+    case process.env.STRIPE_PRICE_ID_HOBBY || process.env.STRIPE_PRICE_ID_HOBBY_YEARLY:
+      return 'hobby';
+    case process.env.STRIPE_PRICE_ID_STANDARD_NEW || process.env.STRIPE_PRICE_ID_STANDARD_NEW_YEARLY:
+      return 'standard-new';
+    case process.env.STRIPE_PRICE_ID_GROWTH || process.env.STRIPE_PRICE_ID_GROWTH_YEARLY:
+      return 'growth';
     default:
-      return 'starter';
+      return 'free';
   }
 }

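A note on the new `case ... || ...` labels: in JavaScript and TypeScript, a case label is an ordinary expression, evaluated once and compared to the switch operand with strict equality. `case A || B:` therefore matches only the value of `A || B` (that is, `A` when `A` is a non-empty string, otherwise `B`), not "either A or B". With both env vars set, a yearly price id falls through to `default` ('free'). A self-contained sketch of the semantics, with made-up ids:

    // Illustrative values, not real Stripe price ids.
    const MONTHLY = "price_monthly";
    const YEARLY = "price_yearly";

    function planFor(priceId: string): string {
      switch (priceId) {
        // `MONTHLY || YEARLY` evaluates to "price_monthly" (truthy),
        // so "price_yearly" can never match via this label.
        case MONTHLY || YEARLY:
          return "hobby";
        default:
          return "free";
      }
    }

    console.log(planFor("price_monthly")); // "hobby"
    console.log(planFor("price_yearly"));  // "free"
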
@@ -15,7 +15,8 @@ export async function scrapeHelper(
   crawlerOptions: any,
   pageOptions: PageOptions,
   extractorOptions: ExtractorOptions,
-  timeout: number
+  timeout: number,
+  plan?: string
 ): Promise<{
   success: boolean;
   error?: string;

@@ -64,7 +65,9 @@ export async function scrapeHelper(
   }
 
   let creditsToBeBilled = filteredDocs.length;
-  const creditsPerLLMExtract = 5;
+  const creditsPerLLMExtract = plan === "starter" ? 5 : 50;
+
+
   if (extractorOptions.mode === "llm-extraction") {
     creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length);

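To make the billing change concrete: for an llm-extraction scrape over N filtered documents, the bill is N + creditsPerLLMExtract * N, and the per-extract rate now depends on the plan threaded in from auth. A worked example with an illustrative document count:

    // Worked example of the new rule; the document count is made up.
    const docs = 10;
    const perExtract = (plan?: string) => (plan === "starter" ? 5 : 50);

    const starterBill = docs + perExtract("starter") * docs; // 10 + 5 * 10  = 60
    const growthBill  = docs + perExtract("growth")  * docs; // 10 + 50 * 10 = 510
    console.log(starterBill, growthBill); // 60 510
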
@@ -93,7 +96,7 @@ export async function scrapeHelper(
 export async function scrapeController(req: Request, res: Response) {
   try {
     // make sure to authenticate user first, Bearer <token>
-    const { success, team_id, error, status } = await authenticateUser(
+    const { success, team_id, error, status, plan } = await authenticateUser(
       req,
       res,
       RateLimiterMode.Scrape

@@ -129,7 +132,8 @@ export async function scrapeController(req: Request, res: Response) {
       crawlerOptions,
       pageOptions,
       extractorOptions,
-      timeout
+      timeout,
+      plan
     );
     const endTime = new Date().getTime();
     const timeTakenInSeconds = (endTime - startTime) / 1000;

@@ -168,3 +168,6 @@ app.get('/serverHealthCheck/notify', async (req, res) => {
 app.get("/is-production", (req, res) => {
   res.send({ isProduction: global.isProduction });
 });
+
+
+// /workers health check, cant act as load balancer, just has to be a pre deploy thing

@@ -1,7 +1,7 @@
 import { withAuth } from "../../lib/withAuth";
 import { supabase_service } from "../supabase";
 
-const FREE_CREDITS = 300;
+const FREE_CREDITS = 500;
 
 export async function billTeam(team_id: string, credits: number) {
   return withAuth(supaBillTeam)(team_id, credits);

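Besides raising the free allowance from 300 to 500 credits, nothing changes for callers: `billTeam` still wraps the Supabase-backed `supaBillTeam` in `withAuth`, so a caller only supplies a team id and a credit count. A minimal caller sketch; the import path and function name around it are assumptions for illustration:

    // Hypothetical caller of the unchanged billTeam API.
    import { billTeam } from "../services/billing/credit_billing"; // assumed path

    async function finalizeScrape(teamId: string, credits: number) {
      // withAuth gates the underlying Supabase call behind the configured auth mode.
      await billTeam(teamId, credits);
    }
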
@@ -2,133 +2,68 @@ import { RateLimiterRedis } from "rate-limiter-flexible";
 import * as redis from "redis";
 import { RateLimiterMode } from "../../src/types";
 
-const MAX_CRAWLS_PER_MINUTE_STARTER = 3;
-const MAX_CRAWLS_PER_MINUTE_STANDARD = 5;
-const MAX_CRAWLS_PER_MINUTE_SCALE = 20;
-
-const MAX_SCRAPES_PER_MINUTE_STARTER = 20;
-const MAX_SCRAPES_PER_MINUTE_STANDARD = 40;
-const MAX_SCRAPES_PER_MINUTE_SCALE = 50;
-
-const MAX_SEARCHES_PER_MINUTE_STARTER = 20;
-const MAX_SEARCHES_PER_MINUTE_STANDARD = 40;
-const MAX_SEARCHES_PER_MINUTE_SCALE = 50;
-
-const MAX_REQUESTS_PER_MINUTE_PREVIEW = 5;
-const MAX_REQUESTS_PER_MINUTE_ACCOUNT = 20;
-const MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS = 150;
+const RATE_LIMITS = {
+  crawl: {
+    free: 1,
+    starter: 3,
+    standard: 5,
+    scale: 20,
+    hobby: 3,
+    standardNew: 10,
+    growth: 50,
+  },
+  scrape: {
+    free: 5,
+    starter: 20,
+    standardOld: 40,
+    scale: 50,
+    hobby: 10,
+    standardNew: 50,
+    growth: 500,
+  },
+  search: {
+    free: 5,
+    starter: 20,
+    standard: 40,
+    scale: 50,
+    hobby: 10,
+    standardNew: 50,
+    growth: 500,
+  },
+  preview: 5,
+  account: 20,
+  crawlStatus: 150,
+  testSuite: 10000,
+};
 
 export const redisClient = redis.createClient({
   url: process.env.REDIS_URL,
   legacyMode: true,
 });
 
-export const previewRateLimiter = new RateLimiterRedis({
+const createRateLimiter = (keyPrefix, points) => new RateLimiterRedis({
   storeClient: redisClient,
-  keyPrefix: "preview",
-  points: MAX_REQUESTS_PER_MINUTE_PREVIEW,
+  keyPrefix,
+  points,
   duration: 60, // Duration in seconds
 });
 
-export const serverRateLimiter = new RateLimiterRedis({
-  storeClient: redisClient,
-  keyPrefix: "server",
-  points: MAX_REQUESTS_PER_MINUTE_ACCOUNT,
-  duration: 60, // Duration in seconds
-});
+export const previewRateLimiter = createRateLimiter("preview", RATE_LIMITS.preview);
+export const serverRateLimiter = createRateLimiter("server", RATE_LIMITS.account);
+export const crawlStatusRateLimiter = createRateLimiter("crawl-status", RATE_LIMITS.crawlStatus);
+export const testSuiteRateLimiter = createRateLimiter("test-suite", RATE_LIMITS.testSuite);
 
-export const crawlStatusRateLimiter = new RateLimiterRedis({
-  storeClient: redisClient,
-  keyPrefix: "crawl-status",
-  points: MAX_REQUESTS_PER_MINUTE_CRAWL_STATUS,
-  duration: 60, // Duration in seconds
-});
-
-export const testSuiteRateLimiter = new RateLimiterRedis({
-  storeClient: redisClient,
-  keyPrefix: "test-suite",
-  points: 10000,
-  duration: 60, // Duration in seconds
-});
-
-export function getRateLimiter(mode: RateLimiterMode, token: string, plan?: string){
-  // Special test suite case. TODO: Change this later.
-  if (token.includes("5089cefa58") || token.includes("6254cf9")){
+export function getRateLimiter(mode: RateLimiterMode, token: string, plan?: string) {
+  if (token.includes("5089cefa58") || token.includes("6254cf9")) {
     return testSuiteRateLimiter;
   }
-  switch (mode) {
-    case RateLimiterMode.Preview:
-      return previewRateLimiter;
-    case RateLimiterMode.CrawlStatus:
-      return crawlStatusRateLimiter;
-    case RateLimiterMode.Crawl:
-      if (plan === "standard"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "crawl-standard",
-          points: MAX_CRAWLS_PER_MINUTE_STANDARD,
-          duration: 60, // Duration in seconds
-        });
-      } else if (plan === "scale"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "crawl-scale",
-          points: MAX_CRAWLS_PER_MINUTE_SCALE,
-          duration: 60, // Duration in seconds
-        });
-      }
-      return new RateLimiterRedis({
-        storeClient: redisClient,
-        keyPrefix: "crawl-starter",
-        points: MAX_CRAWLS_PER_MINUTE_STARTER,
-        duration: 60, // Duration in seconds
-      });
-    case RateLimiterMode.Scrape:
-      if (plan === "standard"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "scrape-standard",
-          points: MAX_SCRAPES_PER_MINUTE_STANDARD,
-          duration: 60, // Duration in seconds
-        });
-      } else if (plan === "scale"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "scrape-scale",
-          points: MAX_SCRAPES_PER_MINUTE_SCALE,
-          duration: 60, // Duration in seconds
-        });
-      }
-      return new RateLimiterRedis({
-        storeClient: redisClient,
-        keyPrefix: "scrape-starter",
-        points: MAX_SCRAPES_PER_MINUTE_STARTER,
-        duration: 60, // Duration in seconds
-      });
-    case RateLimiterMode.Search:
-      if (plan === "standard"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "search-standard",
-          points: MAX_SEARCHES_PER_MINUTE_STANDARD,
-          duration: 60, // Duration in seconds
-        });
-      } else if (plan === "scale"){
-        return new RateLimiterRedis({
-          storeClient: redisClient,
-          keyPrefix: "search-scale",
-          points: MAX_SEARCHES_PER_MINUTE_SCALE,
-          duration: 60, // Duration in seconds
-        });
-      }
-      return new RateLimiterRedis({
-        storeClient: redisClient,
-        keyPrefix: "search-starter",
-        points: MAX_SEARCHES_PER_MINUTE_STARTER,
-        duration: 60, // Duration in seconds
-      });
-    default:
-      return serverRateLimiter;
-  }
-}
+
+  const rateLimitConfig = RATE_LIMITS[mode];
+  if (!rateLimitConfig) return serverRateLimiter;
+
+  const planKey = plan ? plan.replace("-", "") : "starter";
+  const points = rateLimitConfig[planKey] || rateLimitConfig.preview;
+
+  return createRateLimiter(`${mode}-${planKey}`, points);
+}

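Callers consume points against the returned limiter exactly as before, via rate-limiter-flexible's consume(), which rejects once the per-minute points are exhausted. Note that getRateLimiter normalizes the plan name by stripping a hyphen before the table lookup and falls back to the starter column when no plan is given. A sketch of plan-aware usage under the new table; the token and plan values are illustrative, and it assumes the RateLimiterMode enum values are the lowercase strings used as RATE_LIMITS keys:

    // Illustrative consumer of the consolidated limiter.
    import { getRateLimiter } from "./services/rate-limiter"; // assumed path
    import { RateLimiterMode } from "./types";

    async function allowScrape(token: string, plan?: string): Promise<boolean> {
      // For mode "scrape" and plan "scale", this resolves to
      // RATE_LIMITS.scrape.scale = 50 points per 60 seconds.
      const limiter = getRateLimiter(RateLimiterMode.Scrape, token, plan);
      try {
        await limiter.consume(token); // spends 1 point, keyed by token
        return true;
      } catch {
        return false; // out of points; the caller should answer 429
      }
    }
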
@@ -57,6 +57,7 @@ export interface AuthResponse {
   team_id?: string;
   error?: string;
   status?: number;
+  plan?: string;
 }