diff --git a/apps/api/src/controllers/v1/batch-scrape.ts b/apps/api/src/controllers/v1/batch-scrape.ts
index b018dc99..9fd5cc50 100644
--- a/apps/api/src/controllers/v1/batch-scrape.ts
+++ b/apps/api/src/controllers/v1/batch-scrape.ts
@@ -66,6 +66,7 @@ export async function batchScrapeController(
         crawl_id: id,
         sitemapped: true,
         v1: true,
+        webhook: req.body.webhook,
       },
       opts: {
         jobId: uuidv4(),
diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts
index b2edd6e7..d885e128 100644
--- a/apps/api/src/controllers/v1/types.ts
+++ b/apps/api/src/controllers/v1/types.ts
@@ -175,9 +175,21 @@ export const scrapeRequestSchema = scrapeOptions.extend({
 export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>;
 export type ScrapeRequestInput = z.input<typeof scrapeRequestSchema>;
 
+export const webhookSchema = z.preprocess(x => {
+  if (typeof x === "string") {
+    return { url: x };
+  } else {
+    return x;
+  }
+}, z.object({
+  url: z.string().url(),
+  headers: z.record(z.string(), z.string()).default({}),
+}).strict(strictMessage))
+
 export const batchScrapeRequestSchema = scrapeOptions.extend({
   urls: url.array(),
   origin: z.string().optional().default("api"),
+  webhook: webhookSchema.optional(),
 }).strict(strictMessage).refine(
   (obj) => {
     const hasExtractFormat = obj.formats?.includes("extract");
@@ -220,17 +232,6 @@ const crawlerOptions = z.object({
 
 export type CrawlerOptions = z.infer<typeof crawlerOptions>;
 
-export const webhookSchema = z.preprocess(x => {
-  if (typeof x === "string") {
-    return { url: x };
-  } else {
-    return x;
-  }
-}, z.object({
-  url: z.string().url(),
-  headers: z.record(z.string(), z.string()).default({}),
-}).strict(strictMessage))
-
 export const crawlRequestSchema = crawlerOptions.extend({
   url,
   origin: z.string().optional().default("api"),
diff --git a/apps/js-sdk/firecrawl/src/index.ts b/apps/js-sdk/firecrawl/src/index.ts
index 45e19197..18038945 100644
--- a/apps/js-sdk/firecrawl/src/index.ts
+++ b/apps/js-sdk/firecrawl/src/index.ts
@@ -543,16 +543,18 @@
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
  async batchScrapeUrls(
    urls: string[],
    params?: ScrapeParams,
    pollInterval: number = 2,
-    idempotencyKey?: string
+    idempotencyKey?: string,
+    webhook?: CrawlParams["webhook"],
  ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
    const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData: any = { urls, ...(params ?? {}) };
+    let jsonData: any = { urls, ...(params ?? {}), webhook };
    try {
      const response: AxiosResponse = await this.postRequest(
        this.apiUrl + `/v1/batch/scrape`,
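
For reviewers, a sketch of the wire format this patch enables on `POST /v1/batch/scrape`, assuming the hosted endpoint at `https://api.firecrawl.dev` and an ESM runtime with `fetch` and top-level `await`; URLs and header values here are illustrative:

```ts
// Hypothetical raw request. The string form of `webhook` is normalized to
// { url, headers: {} } by webhookSchema's preprocess step before validation.
const res = await fetch("https://api.firecrawl.dev/v1/batch/scrape", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer fc-YOUR-API-KEY",
  },
  body: JSON.stringify({
    urls: ["https://example.com/a", "https://example.com/b"],
    formats: ["markdown"],
    webhook: "https://example.com/hook", // or { url: "...", headers: { ... } }
  }),
});
console.log(await res.json());
```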
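The `webhookSchema` move in `types.ts` is a pure relocation (it now has to be declared before `batchScrapeRequestSchema`, which references it). For reference, a self-contained sketch of its normalization behavior, with `strictMessage` stubbed since that constant is defined elsewhere in `types.ts`:

```ts
import { z } from "zod";

// Stand-in for the strictMessage constant defined elsewhere in types.ts.
const strictMessage = "Unrecognized key in body";

const webhookSchema = z.preprocess(x => {
  if (typeof x === "string") {
    return { url: x };
  } else {
    return x;
  }
}, z.object({
  url: z.string().url(),
  headers: z.record(z.string(), z.string()).default({}),
}).strict(strictMessage));

// A bare string is coerced into the object form, with empty default headers:
console.log(webhookSchema.parse("https://example.com/hook"));
// -> { url: "https://example.com/hook", headers: {} }

// The object form passes through, and custom headers are preserved:
console.log(webhookSchema.parse({
  url: "https://example.com/hook",
  headers: { Authorization: "Bearer secret" },
}));
// -> { url: "https://example.com/hook", headers: { Authorization: "Bearer secret" } }
```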
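On the SDK side, a usage sketch of the new trailing parameter, assuming the published `@mendable/firecrawl-js` package and the method's existing polling behavior:

```ts
import FirecrawlApp from "@mendable/firecrawl-js";

async function main() {
  const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" });

  // webhook is the last positional argument, after idempotencyKey; it accepts
  // the same shapes webhookSchema allows (a bare URL string or { url, headers }).
  const result = await app.batchScrapeUrls(
    ["https://example.com/a", "https://example.com/b"],
    { formats: ["markdown"] }, // params
    2,                         // pollInterval, in seconds
    undefined,                 // no idempotencyKey for this call
    { url: "https://example.com/hook", headers: { "X-Tag": "batch" } },
  );
  console.log(result);
}

main().catch(console.error);
```

Note that because `webhook` is spread after `...(params ?? {})` in `jsonData`, the explicit argument wins over any same-named key in `params`, and when the argument is omitted the `undefined` value is dropped during JSON serialization, so the request body is unchanged for existing callers.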