mirror of https://github.com/mendableai/firecrawl.git (synced 2024-11-16 11:42:24 +08:00)

Commit 8d467c8ca7:

* feat: use strictNullChecking
* feat: switch logger to Winston
* feat(scrapeURL): first batch
* fix(scrapeURL): error swallow
* fix(scrapeURL): add timeout to EngineResultsTracker
* fix(scrapeURL): report unexpected error to sentry
* chore: remove unused modules
* feat(transfomers/coerce): warn when a format's response is missing
* feat(scrapeURL): feature flag priorities, engine quality sorting, PDF and DOCX support
* (add note)
* feat(scrapeURL): wip readme
* feat(scrapeURL): LLM extract
* feat(scrapeURL): better warnings
* fix(scrapeURL/engines/fire-engine;playwright): fix screenshot
* feat(scrapeURL): add forceEngine internal option
* feat(scrapeURL/engines): scrapingbee
* feat(scrapeURL/transformars): uploadScreenshot
* feat(scrapeURL): more intense tests
* bunch of stuff
* get rid of WebScraper (mostly)
* adapt batch scrape
* add staging deploy workflow
* fix yaml
* fix logger issues
* fix v1 test schema
* feat(scrapeURL/fire-engine/chrome-cdp): remove wait inserts on actions
* scrapeURL: v0 backwards compat
* logger fixes
* feat(scrapeurl): v0 returnOnlyUrls support
* fix(scrapeURL/v0): URL leniency
* fix(batch-scrape): ts non-nullable
* fix(scrapeURL/fire-engine/chromecdp): fix wait action
* fix(logger): remove error debug key
* feat(requests.http): use dotenv expression
* fix(scrapeURL/extractMetadata): extract custom metadata
* fix crawl option conversion
* feat(scrapeURL): Add retry logic to robustFetch
* fix(scrapeURL): crawl stuff
* fix(scrapeURL): LLM extract
* fix(scrapeURL/v0): search fix
* fix(tests/v0): grant larger response size to v0 crawl status
* feat(scrapeURL): basic fetch engine
* feat(scrapeURL): playwright engine
* feat(scrapeURL): add url-specific parameters
* Update readme and examples
* added e2e tests for most parameters. Still a few actions, location and iframes to be done.
* fixed type
* Nick:
* Update scrape.ts
* Update index.ts
* added actions and base64 check
* Nick: skipTls feature flag?
* 403
* todo
* todo
* fixes
* yeet headers from url specific params
* add warning when final engine has feature deficit
* expose engine results tracker for ScrapeEvents implementation
* ingest scrape events
* fixed some tests
* comment
* Update index.test.ts
* fixed rawHtml
* Update index.test.ts
* update comments
* move geolocation to global f-e option, fix removeBase64Images
* Nick:
* trim url-specific params

---------

Co-authored-by: Eric Ciarla <ericciarla@yahoo.com>
Co-authored-by: rafaelmmiller <8574157+rafaelmmiller@users.noreply.github.com>
Co-authored-by: Nicolas <nicolascamara29@gmail.com>
43 lines
1.0 KiB
TypeScript
import express from "express";
import { redisHealthController } from "../controllers/v0/admin/redis-health";
import {
  autoscalerController,
  checkQueuesController,
  cleanBefore24hCompleteJobsController,
  queuesController,
} from "../controllers/v0/admin/queue";
import { wrap } from "./v1";
import { acucCacheClearController } from "../controllers/v0/admin/acuc-cache-clear";

export const adminRouter = express.Router();

adminRouter.get(
  `/admin/${process.env.BULL_AUTH_KEY}/redis-health`,
  redisHealthController
);

adminRouter.get(
  `/admin/${process.env.BULL_AUTH_KEY}/clean-before-24h-complete-jobs`,
  cleanBefore24hCompleteJobsController
);

adminRouter.get(
  `/admin/${process.env.BULL_AUTH_KEY}/check-queues`,
  checkQueuesController
);

adminRouter.get(
  `/admin/${process.env.BULL_AUTH_KEY}/queues`,
  queuesController
);

adminRouter.get(
  `/admin/${process.env.BULL_AUTH_KEY}/autoscaler`,
  autoscalerController
);

adminRouter.post(
  `/admin/${process.env.BULL_AUTH_KEY}/acuc-cache-clear`,
  wrap(acucCacheClearController),
);
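
For context, a minimal sketch of how this router might be wired into the service's Express app. The entry-point file, port, and example BULL_AUTH_KEY value below are illustrative assumptions, not taken from the repository.

import express from "express";
import { adminRouter } from "./routes/admin";

const app = express();

// The router already prefixes every path with `/admin/${BULL_AUTH_KEY}`,
// so it can be mounted at the application root; requests without the
// correct key match no route and fall through to Express's default 404.
app.use(adminRouter);

// Port 3002 is an assumption for illustration.
app.listen(3002, () => {
  // With BULL_AUTH_KEY=secret, the Redis health check would be reachable at
  //   GET /admin/secret/redis-health
  console.log("admin routes mounted");
});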