mirror of https://github.com/mendableai/firecrawl.git
synced 2024-11-16 11:42:24 +08:00

Compare commits
14 commits: 9a8a6506e9 ... 3f2914b97f
Commit SHAs:
3f2914b97f
3a342bfbf0
3c1b1909f8
9519897102
7f084c6c43
e8bd089c8a
3fcdf57d2f
d62f12c9d9
f155449458
431e64e752
7bca4486b4
df05124ef5
9298a05045
faf11acf82
apps/api/.gitignore (vendored), 2 additions:

@@ -9,3 +9,5 @@ dump.rdb
 
 .rdb
 .sentryclirc
+
+doctor-*.html
@@ -47,3 +47,12 @@ content-type: application/json
 # @name batchScrapeStatus
 GET {{baseUrl}}/v1/crawl/{{batchScrapeId}} HTTP/1.1
 Authorization: Bearer {{$dotenv TEST_API_KEY}}
+
+### URL Doctor
+# @name urlDoctor
+POST {{baseUrl}}/admin/{{$dotenv BULL_AUTH_KEY}}/doctor HTTP/1.1
+Content-Type: application/json
+
+{
+    "url": "https://firecrawl.dev"
+}
apps/api/src/controllers/v1/admin/doctor-status.ts (new file, 104 lines):

import { Request, Response } from "express";
import { logger as _logger } from "../../../lib/logger";
import { ScrapeUrlResponse } from "../../../scraper/scrapeURL";
import { getScrapeQueue, redisConnection } from "../../../services/queue-service";
import type { Permutation } from "./doctor";
import { Job } from "bullmq";

const logger = _logger.child({ module: "doctorStatusController" });

const errorReplacer = (_, value) => {
  if (value instanceof Error) {
    return {
      ...value,
      name: value.name,
      message: value.message,
      stack: value.stack,
      cause: value.cause,
    }
  } else {
    return value;
  }
};

type PermutationResult = ({
  state: "done",
  result: ScrapeUrlResponse & {
    success: true
  },
} | {
  state: "thrownError",
  error: string | Error | null | undefined,
} | {
  state: "error",
  result: ScrapeUrlResponse & {
    success: false
  },
} | {
  state: "pending",
}) & {
  permutation: Permutation,
};

export async function doctorStatusController(req: Request, res: Response) {
  try {
    const doctorId = req.params.id;

    const meta: { url: string } | null = JSON.parse(await redisConnection.get("doctor:" + doctorId) ?? "null");
    const permutations: Permutation[] | null = JSON.parse(await redisConnection.get("doctor:" + doctorId + ":permutations") ?? "null");
    if (permutations === null || meta === null) {
      return res.status(404).json({ error: "Doctor entry not found" });
    }

    const jobs = (await Promise.all(permutations.map(x => getScrapeQueue().getJob(x.jobId)))).filter(x => x) as Job<unknown, ScrapeUrlResponse>[];

    const results: PermutationResult[] = await Promise.all(jobs.map(async job => {
      const permutation = permutations.find(x => x.jobId === job.id)!;
      const state = await job.getState();
      if (state === "completed" && job.data) {
        if (job.returnvalue.success) {
          return {
            state: "done",
            result: job.returnvalue,
            permutation,
          }
        } else {
          return {
            state: "error",
            result: job.returnvalue,
            permutation,
          }
        }
      } else if (state === "failed") {
        return {
          state: "thrownError",
          error: job.failedReason,
          permutation,
        }
      } else {
        return {
          state: "pending",
          permutation,
        }
      }
    }));

    const html = "<head><meta charset=\"utf8\"></head><body style=\"font-family: sans-serif; padding: 1rem;\"><h1>Doctor</h1><p>URL: <code>" + meta.url + "</code></p>"
      + results.map(x => "<h2>" + (x.state === "pending" ? "⏳" : x.state === "done" ? "✅" : "❌") + " " + x.permutation.name + "</h2><p>Scrape options: <code>" + JSON.stringify(x.permutation.options) + "</code></p>"
        + "<p>Internal options: <code>" + JSON.stringify(x.permutation.internal) + "</code></p>"
        + (x.state !== "pending" ? ("<code><pre>" + ((x.state === "done"
          ? JSON.stringify(x.result, errorReplacer, 4)
          : x.state === "thrownError"
            ? (x.error instanceof Error
              ? (x.error.message + "\n" + (x.error.stack ?? ""))
              : (x.error ?? "<unknown error>"))
            : (JSON.stringify(x.result, errorReplacer, 4))))
          .replaceAll("<", "&lt;").replaceAll(">", "&gt;") + "</pre></code>") : "")).join("")
      + "</body>"

    res.header("Content-Type", "text/html").send(html);
  } catch (error) {
    logger.error("Doctor status error", { error });
    res.status(500).json({ error: "Internal server error" });
  }
}
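A note on the errorReplacer above: JSON.stringify serializes Error instances to {} because their message and stack properties are non-enumerable, so the controller passes a replacer that copies those fields explicitly. A minimal sketch of the same idea (the error below is hypothetical):

// Why a replacer is needed when dumping errors as JSON.
const replacer = (_key: string, value: unknown) =>
  value instanceof Error
    ? { name: value.name, message: value.message, stack: value.stack }
    : value;

const err = new Error("engine timed out");   // hypothetical error
JSON.stringify({ err });                     // '{"err":{}}' — details lost
JSON.stringify({ err }, replacer, 2);        // name/message/stack preserved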
apps/api/src/controllers/v1/admin/doctor.ts (new file, 84 lines):

import { Request, Response } from "express";
import { logger as _logger } from "../../../lib/logger";
import { ScrapeUrlResponse, InternalOptions } from "../../../scraper/scrapeURL";
import { z } from "zod";
import { scrapeOptions } from "../types";
import { Engine, engineOptions, engines } from "../../../scraper/scrapeURL/engines";
import { addScrapeJob, addScrapeJobs } from "../../../services/queue-jobs";
import { redisConnection } from "../../../services/queue-service";

const logger = _logger.child({ module: "doctorController" });

export type Permutation = {
  options: z.input<typeof scrapeOptions>,
  internal: InternalOptions,
  name: string,
  jobId: string,
};

export async function doctorController(req: Request, res: Response) {
  try {
    const doctorId = crypto.randomUUID();

    const permutations: Permutation[] = [
      { options: {}, internal: { verbose: true }, name: "bare", jobId: crypto.randomUUID() },
      ...Object.entries(engineOptions).filter(([name, options]) => options.quality > 0 && engines.includes(name as Engine)).map(([name, _options]) => ({
        options: {}, internal: { forceEngine: name as Engine, verbose: true }, name, jobId: crypto.randomUUID(),
      })),
    ];

    await addScrapeJobs(permutations.map(perm => ({
      data: {
        url: req.body.url,
        mode: "single_urls",
        team_id: null,
        scrapeOptions: scrapeOptions.parse(perm.options),
        internalOptions: perm.internal,
        plan: null,
        origin: "doctor",
        is_scrape: true,
        doctor: true,
      },
      opts: {
        jobId: perm.jobId,
        priority: 10,
      },
    })));

    await redisConnection.set("doctor:" + doctorId, JSON.stringify({ url: req.body.url }), "EX", 86400);
    await redisConnection.set("doctor:" + doctorId + ":permutations", JSON.stringify(permutations), "EX", 86400);

    const protocol = process.env.ENV === "local" ? req.protocol : "https";

    res.json({ ok: true, id: doctorId, url: `${protocol}://${req.get("host")}/admin/${process.env.BULL_AUTH_KEY}/doctor/${doctorId}` });

    // await Promise.all(permutations.map(async perm => {
    //   try {
    //     const result = await scrapeURL(doctorId + ":bare", url, scrapeOptions.parse(perm.options), perm.internal);
    //     if (result.success) {
    //       results.push({
    //         state: "done",
    //         result,
    //         permutation: perm,
    //       });
    //     } else {
    //       results.push({
    //         state: "error",
    //         result,
    //         permutation: perm,
    //       });
    //     }
    //   } catch (error) {
    //     console.error("Permutation " + perm.name + " failed with error", { error });
    //     results.push({
    //       state: "thrownError",
    //       error,
    //       permutation: perm,
    //     });
    //   }
    // }));
  } catch (error) {
    logger.error("Doctor error", { error });
    res.status(500).json({ error: "Internal server error" });
  }
}
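Combined with the admin routes added further down, the new controllers can be exercised roughly as follows. This is only a sketch: the base URL is an assumption, and BULL_AUTH_KEY must be set in the environment.

// Kick off a doctor run and fetch the HTML status report.
const baseUrl = "http://localhost:3002";                       // assumed local API
const authKey = process.env.BULL_AUTH_KEY;

const res = await fetch(`${baseUrl}/admin/${authKey}/doctor`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ url: "https://firecrawl.dev" }),
});
const { ok, id, url } = await res.json();                      // url points at /admin/.../doctor/<id>

// The status endpoint renders one section per engine permutation (⏳ / ✅ / ❌).
const reportHtml = await (await fetch(url)).text();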
@@ -16,6 +16,7 @@ import { logCrawl } from "../../services/logging/crawl_log";
 import { getScrapeQueue } from "../../services/queue-service";
 import { getJobPriority } from "../../lib/job-priority";
 import { addScrapeJobs } from "../../services/queue-jobs";
+import { callWebhook } from "../../services/webhook";
 
 export async function batchScrapeController(
   req: RequestWithAuth<{}, CrawlResponse, BatchScrapeRequest>,
@@ -66,6 +67,7 @@ export async function batchScrapeController(
       crawl_id: id,
       sitemapped: true,
       v1: true,
+      webhook: req.body.webhook,
     },
     opts: {
       jobId: uuidv4(),
@@ -85,6 +87,10 @@ export async function batchScrapeController(
   );
   await addScrapeJobs(jobs);
 
+  if(req.body.webhook) {
+    await callWebhook(req.auth.team_id, id, null, req.body.webhook, true, "batch_scrape.started");
+  }
+
   const protocol = process.env.ENV === "local" ? req.protocol : "https";
 
   return res.status(200).json({
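With the webhook field added to batchScrapeRequestSchema further down, a batch scrape request can now register a webhook, and the controller emits a "batch_scrape.started" event before responding. A request sketch (base URL, API key, and webhook URL are placeholders):

// Start a batch scrape that reports progress to a webhook.
const res = await fetch(`${baseUrl}/v1/batch/scrape`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${apiKey}`,                // placeholder
  },
  body: JSON.stringify({
    urls: ["https://firecrawl.dev", "https://docs.firecrawl.dev"],
    formats: ["markdown"],
    webhook: "https://example.com/hooks/firecrawl",   // plain string or { url, headers }
  }),
});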
@@ -1,10 +1,6 @@
 import { Response } from "express";
 import { v4 as uuidv4 } from "uuid";
-import {
-  mapRequestSchema,
-  RequestWithAuth,
-  scrapeOptions,
-} from "./types";
+import { mapRequestSchema, RequestWithAuth, scrapeOptions } from "./types";
 import { crawlToCrawler, StoredCrawl } from "../../lib/crawl-redis";
 import { MapResponse, MapRequest } from "./types";
 import { configDotenv } from "dotenv";
@@ -46,6 +42,7 @@ export async function mapController(
     originUrl: req.body.url,
     crawlerOptions: {
       ...req.body,
+      limit: req.body.sitemapOnly ? 10000000 : limit,
       scrapeOptions: undefined,
     },
     scrapeOptions: scrapeOptions.parse({}),
@@ -57,6 +54,16 @@ export async function mapController(
 
   const crawler = crawlToCrawler(id, sc);
 
+  // If sitemapOnly is true, only get links from sitemap
+  if (req.body.sitemapOnly) {
+    const sitemap = await crawler.tryGetSitemap(true, true);
+    if (sitemap !== null) {
+      sitemap.forEach((x) => {
+        links.push(x.url);
+      });
+      links = links.slice(1, limit);
+    }
+  } else {
     let urlWithoutWww = req.body.url.replace("www.", "");
 
     let mapUrl = req.body.search
@@ -64,7 +71,9 @@ export async function mapController(
       : `site:${req.body.url}`;
 
     const resultsPerPage = 100;
-    const maxPages = Math.ceil(Math.min(MAX_FIRE_ENGINE_RESULTS, limit) / resultsPerPage);
+    const maxPages = Math.ceil(
+      Math.min(MAX_FIRE_ENGINE_RESULTS, limit) / resultsPerPage
+    );
 
     const cacheKey = `fireEngineMap:${mapUrl}`;
     const cachedResult = null;
@@ -82,7 +91,9 @@ export async function mapController(
       });
     };
 
-    pagePromises = Array.from({ length: maxPages }, (_, i) => fetchPage(i + 1));
+    pagePromises = Array.from({ length: maxPages }, (_, i) =>
+      fetchPage(i + 1)
+    );
     allResults = await Promise.all(pagePromises);
 
     await redis.set(cacheKey, JSON.stringify(allResults), "EX", 24 * 60 * 60); // Cache for 24 hours
@@ -90,7 +101,7 @@ export async function mapController(
 
     // Parallelize sitemap fetch with serper search
     const [sitemap, ...searchResults] = await Promise.all([
-      req.body.ignoreSitemap ? null : crawler.tryGetSitemap(),
+      req.body.ignoreSitemap ? null : crawler.tryGetSitemap(true),
       ...(cachedResult ? [] : pagePromises),
     ]);
 
@@ -128,6 +139,8 @@ export async function mapController(
       }
     }
 
+
+  }
   // Perform cosine similarity between the search query and the list of links
   if (req.body.search) {
     const searchQuery = req.body.search.toLowerCase();
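The sitemapOnly branch above bypasses the search/fire-engine path entirely and returns whatever tryGetSitemap(true, true) yields, with the internal crawl limit raised to 10,000,000. A request sketch against the v1 map endpoint (the route path, base URL, and API key are assumptions here):

// Map a site using only its sitemap.
const res = await fetch(`${baseUrl}/v1/map`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${apiKey}`,   // placeholder
  },
  body: JSON.stringify({ url: "https://firecrawl.dev", sitemapOnly: true }),
});
console.log(await res.json());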
@@ -175,9 +175,21 @@ export const scrapeRequestSchema = scrapeOptions.extend({
 export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>;
 export type ScrapeRequestInput = z.input<typeof scrapeRequestSchema>;
 
+export const webhookSchema = z.preprocess(x => {
+  if (typeof x === "string") {
+    return { url: x };
+  } else {
+    return x;
+  }
+}, z.object({
+  url: z.string().url(),
+  headers: z.record(z.string(), z.string()).default({}),
+}).strict(strictMessage))
+
 export const batchScrapeRequestSchema = scrapeOptions.extend({
   urls: url.array(),
   origin: z.string().optional().default("api"),
+  webhook: webhookSchema.optional(),
 }).strict(strictMessage).refine(
   (obj) => {
     const hasExtractFormat = obj.formats?.includes("extract");
@@ -220,17 +232,6 @@ const crawlerOptions = z.object({
 
 export type CrawlerOptions = z.infer<typeof crawlerOptions>;
 
-export const webhookSchema = z.preprocess(x => {
-  if (typeof x === "string") {
-    return { url: x };
-  } else {
-    return x;
-  }
-}, z.object({
-  url: z.string().url(),
-  headers: z.record(z.string(), z.string()).default({}),
-}).strict(strictMessage))
-
 export const crawlRequestSchema = crawlerOptions.extend({
   url,
   origin: z.string().optional().default("api"),
@@ -260,6 +261,7 @@ export const mapRequestSchema = crawlerOptions.extend({
   includeSubdomains: z.boolean().default(true),
   search: z.string().optional(),
   ignoreSitemap: z.boolean().default(false),
+  sitemapOnly: z.boolean().default(false),
   limit: z.number().min(1).max(5000).default(5000),
 }).strict(strictMessage);
 
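webhookSchema, now declared before batchScrapeRequestSchema so it can be referenced there, normalizes its input before validation: a bare string becomes { url }, and headers defaults to an empty object. A small sketch of what the preprocess step accepts:

// Both forms parse to the same normalized shape.
webhookSchema.parse("https://example.com/hook");
// => { url: "https://example.com/hook", headers: {} }

webhookSchema.parse({ url: "https://example.com/hook", headers: { "X-Signature": "secret" } });
// => { url: "https://example.com/hook", headers: { "X-Signature": "secret" } }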
@@ -1,4 +1,5 @@
 import * as winston from "winston";
+import Transport from "winston-transport";
 
 import { configDotenv } from "dotenv";
 configDotenv();
@@ -49,3 +50,33 @@ export const logger = winston.createLogger({
     }),
   ],
 });
+
+export type ArrayTransportOptions = Transport.TransportStreamOptions & {
+  array: any[];
+  scrapeId?: string;
+};
+
+export class ArrayTransport extends Transport {
+  private array: any[];
+  private scrapeId?: string;
+
+  constructor(opts: ArrayTransportOptions) {
+    super(opts);
+    this.array = opts.array;
+    this.scrapeId = opts.scrapeId;
+  }
+
+  log(info, next) {
+    setImmediate(() => {
+      this.emit("logged", info);
+    });
+
+    if (this.scrapeId !== undefined && info.scrapeId !== this.scrapeId) {
+      return next();
+    }
+
+    this.array.push(info);
+
+    next();
+  }
+}
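ArrayTransport pushes each log entry into a caller-supplied array, optionally filtered to a single scrapeId; the verbose scrape option introduced below uses it to capture per-scrape logs in memory. A minimal usage sketch (the scrape ids are placeholders):

// Collect logs for one scrape into an array.
const logs: any[] = [];
logger.add(new ArrayTransport({ array: logs, scrapeId: "scrape-123" }));

logger.info("engine selected", { scrapeId: "scrape-123" });   // captured into logs
logger.info("unrelated message", { scrapeId: "scrape-456" }); // filtered out by scrapeId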
@@ -70,7 +70,7 @@ export async function runWebScraper({
     }
   }
 
-  if(is_scrape === false) {
+  if(is_scrape === false && team_id) {
     let creditsToBeBilled = 1; // Assuming 1 credit per document
     if (scrapeOptions.extract) {
       creditsToBeBilled = 5;
@@ -8,6 +8,8 @@ import {
 } from "../controllers/v0/admin/queue";
 import { wrap } from "./v1";
 import { acucCacheClearController } from "../controllers/v0/admin/acuc-cache-clear";
+import { doctorController } from "../controllers/v1/admin/doctor";
+import { doctorStatusController } from "../controllers/v1/admin/doctor-status";
 
 export const adminRouter = express.Router();
 
@@ -40,3 +42,13 @@ adminRouter.post(
   `/admin/${process.env.BULL_AUTH_KEY}/acuc-cache-clear`,
   wrap(acucCacheClearController),
 );
+
+adminRouter.post(
+  `/admin/${process.env.BULL_AUTH_KEY}/doctor`,
+  wrap(doctorController),
+);
+
+adminRouter.get(
+  `/admin/${process.env.BULL_AUTH_KEY}/doctor/:id`,
+  wrap(doctorStatusController),
+);
@@ -65,7 +65,12 @@ export class WebCrawler {
     this.allowExternalContentLinks = allowExternalContentLinks ?? false;
   }
 
-  public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
+  public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number, fromMap: boolean = false): string[] {
+    // If the initial URL is a sitemap.xml, skip filtering
+    if (this.initialUrl.endsWith('sitemap.xml') && fromMap) {
+      return sitemapLinks.slice(0, limit);
+    }
+
     return sitemapLinks
       .filter((link) => {
         let url: URL;
@@ -159,11 +164,14 @@ export class WebCrawler {
     this.robots = robotsParser(this.robotsTxtUrl, txt);
   }
 
-  public async tryGetSitemap(): Promise<{ url: string; html: string; }[] | null> {
+  public async tryGetSitemap(fromMap: boolean = false, onlySitemap: boolean = false): Promise<{ url: string; html: string; }[] | null> {
     logger.debug(`Fetching sitemap links from ${this.initialUrl}`);
     const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl);
+    if(fromMap && onlySitemap) {
+      return sitemapLinks.map(link => ({ url: link, html: "" }));
+    }
     if (sitemapLinks.length > 0) {
-      let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth);
+      let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth, fromMap);
       return filteredLinks.map(link => ({ url: link, html: "" }));
     }
     return null;
@@ -353,6 +361,7 @@ export class WebCrawler {
       return url;
     };
 
+
     const sitemapUrl = url.endsWith("/sitemap.xml")
       ? url
       : `${url}/sitemap.xml`;
@@ -13,12 +13,12 @@ export async function scrapeURLWithPlaywright(meta: Meta): Promise<EngineScrapeR
     headers: {
       "Content-Type": "application/json",
     },
-    body: JSON.stringify({
+    body: {
       url: meta.url,
       wait_after_load: meta.options.waitFor,
       timeout,
       headers: meta.options.headers,
-    }),
+    },
     method: "POST",
     logger: meta.logger.child("scrapeURLWithPlaywright/robustFetch"),
     schema: z.object({
@@ -2,7 +2,7 @@ import { Logger } from "winston";
 import * as Sentry from "@sentry/node";
 
 import { Document, ScrapeOptions } from "../../controllers/v1/types";
-import { logger } from "../../lib/logger";
+import { ArrayTransport, logger } from "../../lib/logger";
 import { buildFallbackList, Engine, EngineScrapeResult, FeatureFlag, scrapeURLWithEngine } from "./engines";
 import { parseMarkdown } from "../../lib/html-to-markdown";
 import { AddFeatureError, EngineError, NoEnginesLeftError, TimeoutError } from "./error";
@@ -97,6 +97,9 @@ function buildMetaObject(id: string, url: string, options: ScrapeOptions, intern
 
   const _logger = logger.child({ module: "ScrapeURL", scrapeId: id, scrapeURL: url });
   const logs: any[] = [];
+  if (internalOptions.verbose) {
+    _logger.add(new ArrayTransport({ array: logs, scrapeId: id }));
+  }
 
   return {
     id, url, options, internalOptions,
@@ -114,6 +117,8 @@ export type InternalOptions = {
   v0CrawlOnlyUrls?: boolean;
   v0UseFastMode?: boolean;
   v0DisableJsDom?: boolean;
+
+  verbose?: boolean; // stores logs. will cause high memory usage. use with caution
 };
 
 export type EngineResultsTracker = { [E in Engine]?: ({
@@ -229,7 +234,7 @@ async function scrapeURLLoop(
       throw error;
     } else {
       Sentry.captureException(error);
-      meta.logger.info("An unexpected error happened while scraping with " + engine + ".", { error });
+      meta.logger.warn("An unexpected error happened while scraping with " + engine + ".", { error });
       results[engine] = {
         state: "error",
         error: safeguardCircularError(error),
@@ -1,4 +1,3 @@
-import axios from "axios";
 import dotenv from "dotenv";
 import { SearchResult } from "../../src/lib/entities";
 import * as Sentry from "@sentry/node";
@@ -6,7 +5,6 @@ import { logger } from "../lib/logger";
 
 dotenv.config();
 
-
 export async function fireEngineMap(
   q: string,
   options: {
@@ -37,18 +35,18 @@ export async function fireEngineMap(
     return [];
   }
 
-  let config = {
+  const response = await fetch(`${process.env.FIRE_ENGINE_BETA_URL}/search`, {
     method: "POST",
-    url: `${process.env.FIRE_ENGINE_BETA_URL}/search`,
     headers: {
       "Content-Type": "application/json",
-      "X-Disable-Cache": "true"
+      "X-Disable-Cache": "true",
     },
-    data: data,
-  };
-  const response = await axios(config);
-  if (response && response.data) {
-    return response.data;
+    body: data,
+  });
+
+  if (response.ok) {
+    const responseData = await response.json();
+    return responseData;
   } else {
     return [];
   }
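Note that switching from axios to fetch also changes error behavior: fetch resolves on non-2xx responses instead of throwing, which is why the code now checks response.ok. In general terms (endpoint and body are placeholders):

// fetch resolves even for HTTP errors; status must be checked explicitly.
const response = await fetch(endpoint, { method: "POST", body });
if (!response.ok) {
  // handle the error status here rather than in a catch block
}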
@@ -38,6 +38,7 @@ import { configDotenv } from "dotenv";
 import { scrapeOptions } from "../controllers/v1/types";
 import { getRateLimiterPoints } from "./rate-limiter";
 import { cleanOldConcurrencyLimitEntries, pushConcurrencyLimitActiveJob, removeConcurrencyLimitActiveJob, takeConcurrencyLimitedJob } from "../lib/concurrency-limit";
+import { ScrapeUrlResponse } from "../scraper/scrapeURL";
 configDotenv();
 
 const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
@@ -289,17 +290,12 @@ async function processJob(job: Job & { id: string }, token: string) {
       ] : [])
     ]);
 
-    if (!pipeline.success) {
-      // TODO: let's Not do this
-      throw pipeline.error;
-    }
-
     const end = Date.now();
     const timeTakenInSeconds = (end - start) / 1000;
 
-    const doc = pipeline.document;
+    const doc = (pipeline as ScrapeUrlResponse & { success: true }).document;
 
-    const rawHtml = doc.rawHtml ?? "";
+    const rawHtml = doc?.rawHtml ?? "";
 
     const data = {
       success: true,
@@ -313,6 +309,16 @@ async function processJob(job: Job & { id: string }, token: string) {
       document: doc,
     };
 
+    if (job.data.doctor) {
+      (data.document as any) = pipeline as unknown as Document; // force it in there
+      return data;
+    }
+
+    if (!pipeline.success) {
+      // TODO: let's Not do this
+      throw pipeline.error;
+    }
+
     if (job.data.webhook && job.data.mode !== "crawl" && job.data.v1) {
       await callWebhook(
         job.data.team_id,
@@ -29,8 +29,8 @@ export interface WebScraperOptions {
   crawlerOptions?: any;
   scrapeOptions: ScrapeOptions;
   internalOptions?: InternalOptions;
-  team_id: string;
-  plan: string;
+  team_id: string | null;
+  plan: string | null;
   origin?: string;
   crawl_id?: string;
   sitemapped?: boolean;
@@ -46,7 +46,7 @@ export interface RunWebScraperParams {
   internalOptions?: InternalOptions;
   // onSuccess: (result: V1Document, mode: string) => void;
   // onError: (error: Error) => void;
-  team_id: string;
+  team_id: string | null;
   bull_job_id: string;
   priority?: number;
   is_scrape?: boolean;
@@ -166,4 +166,4 @@ export type PlanType =
   | "";
 
 
-export type WebhookEventType = "crawl.page" | "batch_scrape.page" | "crawl.started" | "crawl.completed" | "batch_scrape.completed" | "crawl.failed";
+export type WebhookEventType = "crawl.page" | "batch_scrape.page" | "crawl.started" | "batch_scrape.started" | "crawl.completed" | "batch_scrape.completed" | "crawl.failed";
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@mendable/firecrawl-js",
|
"name": "@mendable/firecrawl-js",
|
||||||
"version": "1.8.2",
|
"version": "1.8.4",
|
||||||
"description": "JavaScript SDK for Firecrawl API",
|
"description": "JavaScript SDK for Firecrawl API",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|
|
@@ -221,6 +221,7 @@ export interface MapParams {
   search?: string;
   ignoreSitemap?: boolean;
   includeSubdomains?: boolean;
+  sitemapOnly?: boolean;
   limit?: number;
 }
 
@@ -543,16 +544,18 @@ export default class FirecrawlApp {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
   async batchScrapeUrls(
     urls: string[],
     params?: ScrapeParams,
     pollInterval: number = 2,
-    idempotencyKey?: string
+    idempotencyKey?: string,
+    webhook?: CrawlParams["webhook"],
   ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData: any = { urls, ...(params ?? {}) };
+    let jsonData: any = { urls, ...(params ?? {}), webhook };
     try {
       const response: AxiosResponse = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
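On the SDK side the new webhook parameter comes after idempotencyKey, so existing call sites keep working. A usage sketch (the API key and webhook URL are placeholders):

// Batch scrape via the JS SDK, passing the new webhook argument.
import FirecrawlApp from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" });  // placeholder key

const result = await app.batchScrapeUrls(
  ["https://firecrawl.dev", "https://docs.firecrawl.dev"],
  { formats: ["markdown"] },
  2,          // pollInterval (seconds)
  undefined,  // idempotencyKey
  { url: "https://example.com/hooks/firecrawl" },             // webhook
);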