feat(v1/batch/scrape): webhooks

Author: Móricz Gergő, 2024-11-14 22:36:28 +01:00
parent 86a78a03cb
commit df05124ef5
3 changed files with 17 additions and 13 deletions


@@ -66,6 +66,7 @@ export async function batchScrapeController(
         crawl_id: id,
         sitemapped: true,
         v1: true,
+        webhook: req.body.webhook,
       },
       opts: {
         jobId: uuidv4(),
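The change above forwards req.body.webhook into the data of each queued batch scrape job, so webhook events can fire for batch scrapes the same way they do for crawls. A minimal sketch of the resulting request, assuming the hosted API's usual base URL and bearer-token auth; the urls and webhook fields come from the schema changes below.

async function startBatchScrapeWithWebhook(): Promise<void> {
  // POST /v1/batch/scrape with a webhook; a bare URL string or
  // { url, headers } is accepted (see webhookSchema below).
  const res = await fetch("https://api.firecrawl.dev/v1/batch/scrape", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer fc-YOUR-API-KEY", // assumed auth scheme
    },
    body: JSON.stringify({
      urls: ["https://example.com", "https://example.org"],
      webhook: "https://example.com/firecrawl-events",
    }),
  });
  console.log(res.status, await res.json());
}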


@@ -175,9 +175,21 @@ export const scrapeRequestSchema = scrapeOptions.extend({
 export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>;
 export type ScrapeRequestInput = z.input<typeof scrapeRequestSchema>;
 
+export const webhookSchema = z.preprocess(x => {
+  if (typeof x === "string") {
+    return { url: x };
+  } else {
+    return x;
+  }
+}, z.object({
+  url: z.string().url(),
+  headers: z.record(z.string(), z.string()).default({}),
+}).strict(strictMessage))
+
 export const batchScrapeRequestSchema = scrapeOptions.extend({
   urls: url.array(),
   origin: z.string().optional().default("api"),
+  webhook: webhookSchema.optional(),
 }).strict(strictMessage).refine(
   (obj) => {
     const hasExtractFormat = obj.formats?.includes("extract");
@@ -220,17 +232,6 @@ const crawlerOptions = z.object({
 export type CrawlerOptions = z.infer<typeof crawlerOptions>;
 
-export const webhookSchema = z.preprocess(x => {
-  if (typeof x === "string") {
-    return { url: x };
-  } else {
-    return x;
-  }
-}, z.object({
-  url: z.string().url(),
-  headers: z.record(z.string(), z.string()).default({}),
-}).strict(strictMessage))
-
 export const crawlRequestSchema = crawlerOptions.extend({
   url,
   origin: z.string().optional().default("api"),
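webhookSchema is moved up so that batchScrapeRequestSchema can reference it; the second hunk removes the old copy that sat below crawlerOptions. The z.preprocess step normalizes a bare URL string into { url } before validation, so both shapes are accepted. A self-contained sketch of that behavior, with strictMessage stubbed out as a placeholder since the real constant is defined elsewhere in the module.

import { z } from "zod";

const strictMessage = "Unrecognized key in body"; // placeholder for the module-level constant

const webhookSchema = z.preprocess(x => {
  // Normalize a bare URL string to the object form before validating.
  if (typeof x === "string") {
    return { url: x };
  } else {
    return x;
  }
}, z.object({
  url: z.string().url(),
  headers: z.record(z.string(), z.string()).default({}),
}).strict(strictMessage));

console.log(webhookSchema.parse("https://example.com/hook"));
// => { url: "https://example.com/hook", headers: {} }
console.log(webhookSchema.parse({ url: "https://example.com/hook", headers: { "x-secret": "abc" } }));
// => { url: "https://example.com/hook", headers: { "x-secret": "abc" } }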


@@ -543,16 +543,18 @@ export default class FirecrawlApp {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
   async batchScrapeUrls(
     urls: string[],
     params?: ScrapeParams,
     pollInterval: number = 2,
-    idempotencyKey?: string
+    idempotencyKey?: string,
+    webhook?: CrawlParams["webhook"],
   ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData: any = { urls, ...(params ?? {}) };
+    let jsonData: any = { urls, ...(params ?? {}), webhook };
     try {
       const response: AxiosResponse = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
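On the SDK side, batchScrapeUrls gains a trailing webhook parameter typed as CrawlParams["webhook"] and spreads it into the request body. A usage sketch, assuming the published @mendable/firecrawl-js package and that the webhook type accepts a plain URL string, as the crawl endpoint does.

import FirecrawlApp from "@mendable/firecrawl-js";

async function run(): Promise<void> {
  const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" });

  // webhook is the new fifth argument; the earlier arguments keep their
  // existing meaning (params, poll interval, idempotency key).
  const result = await app.batchScrapeUrls(
    ["https://example.com", "https://example.org"],
    { formats: ["markdown"] },
    2,
    undefined,
    "https://example.com/firecrawl-events",
  );

  console.log(result);
}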