Nick: fixes

This commit is contained in:
Nicolas 2024-10-23 15:59:40 -03:00
parent d8abd15716
commit 60b6e6b1d4
2 changed files with 31 additions and 5 deletions

View File

@ -1,6 +1,6 @@
{ {
"name": "firecrawl", "name": "@mendable/firecrawl-js",
"version": "1.6.1", "version": "1.7.0-beta.2",
"description": "JavaScript SDK for Firecrawl API", "description": "JavaScript SDK for Firecrawl API",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",

View File

@ -154,6 +154,17 @@ export interface CrawlResponse {
error?: string; error?: string;
} }
/**
 * Response interface for batch scrape operations.
 * Defines the structure of the response received after initiating a batch scrape.
 */
export interface BatchScrapeResponse {
/** ID of the enqueued batch scrape job, when the request succeeds. */
id?: string;
/** URL for checking the job — presumably the status endpoint; verify against the API. */
url?: string;
/** Literal `true`: failure cases are returned as `ErrorResponse` instead (see the union return types below). */
success: true;
/** Error description, if the API reported one. */
error?: string;
}
/** /**
* Response interface for job status checks. * Response interface for job status checks.
* Provides detailed status of a crawl job including progress and results. * Provides detailed status of a crawl job including progress and results.
@ -169,6 +180,21 @@ export interface CrawlStatusResponse {
data: FirecrawlDocument<undefined>[]; data: FirecrawlDocument<undefined>[];
}; };
/**
 * Response interface for batch scrape job status checks.
 * Provides detailed status of a batch scrape job including progress and results.
 */
export interface BatchScrapeStatusResponse {
/** Literal `true`: failure cases are returned as `ErrorResponse` instead. */
success: true;
/** Current state of the batch scrape job. */
status: "scraping" | "completed" | "failed" | "cancelled";
/** Number of URLs scraped so far — presumably out of `total`; confirm against API docs. */
completed: number;
/** Total number of URLs in the batch. */
total: number;
/** Credits consumed by the job so far. */
creditsUsed: number;
/** When the job's results expire — NOTE(review): timezone/UTC semantics not visible here; confirm. */
expiresAt: Date;
/** URL of the next page of results, when the document list is paginated. */
next?: string;
/** Scraped documents returned for the job. */
data: FirecrawlDocument<undefined>[];
};
/** /**
* Parameters for mapping operations. * Parameters for mapping operations.
* Defines options for mapping URLs during a crawl. * Defines options for mapping URLs during a crawl.
@ -506,7 +532,7 @@ export default class FirecrawlApp {
params?: ScrapeParams, params?: ScrapeParams,
pollInterval: number = 2, pollInterval: number = 2,
idempotencyKey?: string idempotencyKey?: string
): Promise<CrawlStatusResponse | ErrorResponse> { ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
const headers = this.prepareHeaders(idempotencyKey); const headers = this.prepareHeaders(idempotencyKey);
let jsonData: any = { urls, ...(params ?? {}) }; let jsonData: any = { urls, ...(params ?? {}) };
try { try {
@ -535,7 +561,7 @@ export default class FirecrawlApp {
urls: string[], urls: string[],
params?: ScrapeParams, params?: ScrapeParams,
idempotencyKey?: string idempotencyKey?: string
): Promise<CrawlResponse | ErrorResponse> { ): Promise<BatchScrapeResponse | ErrorResponse> {
const headers = this.prepareHeaders(idempotencyKey); const headers = this.prepareHeaders(idempotencyKey);
let jsonData: any = { urls, ...(params ?? {}) }; let jsonData: any = { urls, ...(params ?? {}) };
try { try {
@ -587,7 +613,7 @@ export default class FirecrawlApp {
* @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`) * @param getAllData - Paginate through all the pages of documents, returning the full list of all documents. (default: `false`)
* @returns The response containing the job status. * @returns The response containing the job status.
*/ */
async checkBatchScrapeStatus(id?: string, getAllData = false): Promise<CrawlStatusResponse | ErrorResponse> { async checkBatchScrapeStatus(id?: string, getAllData = false): Promise<BatchScrapeStatusResponse | ErrorResponse> {
if (!id) { if (!id) {
throw new FirecrawlError("No batch scrape ID provided", 400); throw new FirecrawlError("No batch scrape ID provided", 400);
} }