mirror of https://github.com/mendableai/firecrawl.git
synced 2024-11-16 11:42:24 +08:00

Compare commits: 3f2914b97f ... 9a8a6506e9 (3 commits)

Commits:
- 9a8a6506e9
- 9298a05045
- faf11acf82
apps/api/.gitignore (vendored), 2 lines changed

@@ -9,3 +9,5 @@ dump.rdb
 .rdb
 .sentryclirc
+
+doctor-*.html
apps/api/requests.http

@@ -46,4 +46,13 @@ content-type: application/json
 @batchScrapeId = {{batchScrape.response.body.$.id}}

 # @name batchScrapeStatus
 GET {{baseUrl}}/v1/crawl/{{batchScrapeId}} HTTP/1.1
 Authorization: Bearer {{$dotenv TEST_API_KEY}}
+
+### URL Doctor
+# @name urlDoctor
+POST {{baseUrl}}/admin/{{$dotenv BULL_AUTH_KEY}}/doctor HTTP/1.1
+Content-Type: application/json
+
+{
+    "url": "https://firecrawl.dev"
+}
apps/api/src/controllers/v1/admin/doctor-status.ts (new file, 104 lines)

@@ -0,0 +1,104 @@
import { Request, Response } from "express";
import { logger as _logger } from "../../../lib/logger";
import { ScrapeUrlResponse } from "../../../scraper/scrapeURL";
import { getScrapeQueue, redisConnection } from "../../../services/queue-service";
import type { Permutation } from "./doctor";
import { Job } from "bullmq";

const logger = _logger.child({ module: "doctorStatusController" });

// JSON.stringify replacer that preserves Error fields, which are non-enumerable.
const errorReplacer = (_, value) => {
  if (value instanceof Error) {
    return {
      ...value,
      name: value.name,
      message: value.message,
      stack: value.stack,
      cause: value.cause,
    }
  } else {
    return value;
  }
};

type PermutationResult = ({
  state: "done",
  result: ScrapeUrlResponse & {
    success: true
  },
} | {
  state: "thrownError",
  error: string | Error | null | undefined,
} | {
  state: "error",
  result: ScrapeUrlResponse & {
    success: false
  },
} | {
  state: "pending",
}) & {
  permutation: Permutation,
};

export async function doctorStatusController(req: Request, res: Response) {
  try {
    const doctorId = req.params.id;

    const meta: { url: string } | null = JSON.parse(await redisConnection.get("doctor:" + doctorId) ?? "null");
    const permutations: Permutation[] | null = JSON.parse(await redisConnection.get("doctor:" + doctorId + ":permutations") ?? "null");
    if (permutations === null || meta === null) {
      return res.status(404).json({ error: "Doctor entry not found" });
    }

    const jobs = (await Promise.all(permutations.map(x => getScrapeQueue().getJob(x.jobId)))).filter(x => x) as Job<unknown, ScrapeUrlResponse>[];

    const results: PermutationResult[] = await Promise.all(jobs.map(async job => {
      const permutation = permutations.find(x => x.jobId === job.id)!;
      const state = await job.getState();
      if (state === "completed" && job.data) {
        if (job.returnvalue.success) {
          return {
            state: "done",
            result: job.returnvalue,
            permutation,
          }
        } else {
          return {
            state: "error",
            result: job.returnvalue,
            permutation,
          }
        }
      } else if (state === "failed") {
        return {
          state: "thrownError",
          error: job.failedReason,
          permutation,
        }
      } else {
        return {
          state: "pending",
          permutation,
        }
      }
    }));

    const html = "<head><meta charset=\"utf8\"></head><body style=\"font-family: sans-serif; padding: 1rem;\"><h1>Doctor</h1><p>URL: <code>" + meta.url + "</code></p>"
      + results.map(x => "<h2>" + (x.state === "pending" ? "⏳" : x.state === "done" ? "✅" : "❌") + " " + x.permutation.name + "</h2><p>Scrape options: <code>" + JSON.stringify(x.permutation.options) + "</code></p>"
        + "<p>Internal options: <code>" + JSON.stringify(x.permutation.internal) + "</code></p>"
        + (x.state !== "pending" ? ("<code><pre>" + ((x.state === "done"
          ? JSON.stringify(x.result, errorReplacer, 4)
          : x.state === "thrownError"
            ? (x.error instanceof Error
              ? (x.error.message + "\n" + (x.error.stack ?? ""))
              : (x.error ?? "<unknown error>"))
            : (JSON.stringify(x.result, errorReplacer, 4))))
          .replaceAll("<", "&lt;").replaceAll(">", "&gt;") + "</pre></code>") : "")).join("")
      + "</body>";

    res.header("Content-Type", "text/html").send(html);
  } catch (error) {
    logger.error("Doctor status error", { error });
    res.status(500).json({ error: "Internal server error" });
  }
}
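
Worth noting why errorReplacer exists: Error properties are non-enumerable, so a plain JSON.stringify silently drops them. A minimal self-contained illustration (TypeScript, mirroring the replacer above):

// Error fields (name, message, stack) are non-enumerable, so
// JSON.stringify(new Error(...)) yields "{}" without a replacer.
const errorReplacer = (_: string, value: unknown) =>
  value instanceof Error
    ? { name: value.name, message: value.message, stack: value.stack, cause: value.cause }
    : value;

console.log(JSON.stringify(new Error("boom")));                   // {}
console.log(JSON.stringify(new Error("boom"), errorReplacer, 2)); // name/message/stack preserved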
apps/api/src/controllers/v1/admin/doctor.ts (new file, 84 lines)

@@ -0,0 +1,84 @@
import { Request, Response } from "express";
import { logger as _logger } from "../../../lib/logger";
import { ScrapeUrlResponse, InternalOptions } from "../../../scraper/scrapeURL";
import { z } from "zod";
import { scrapeOptions } from "../types";
import { Engine, engineOptions, engines } from "../../../scraper/scrapeURL/engines";
import { addScrapeJob, addScrapeJobs } from "../../../services/queue-jobs";
import { redisConnection } from "../../../services/queue-service";

const logger = _logger.child({ module: "doctorController" });

export type Permutation = {
  options: z.input<typeof scrapeOptions>,
  internal: InternalOptions,
  name: string,
  jobId: string,
};

export async function doctorController(req: Request, res: Response) {
  try {
    const doctorId = crypto.randomUUID();

    // One bare run with default options, plus one forced run per usable engine.
    const permutations: Permutation[] = [
      { options: {}, internal: { verbose: true }, name: "bare", jobId: crypto.randomUUID() },
      ...Object.entries(engineOptions).filter(([name, options]) => options.quality > 0 && engines.includes(name as Engine)).map(([name, _options]) => ({
        options: {}, internal: { forceEngine: name as Engine, verbose: true }, name, jobId: crypto.randomUUID(),
      })),
    ];

    await addScrapeJobs(permutations.map(perm => ({
      data: {
        url: req.body.url,
        mode: "single_urls",
        team_id: null,
        scrapeOptions: scrapeOptions.parse(perm.options),
        internalOptions: perm.internal,
        plan: null,
        origin: "doctor",
        is_scrape: true,
        doctor: true,
      },
      opts: {
        jobId: perm.jobId,
        priority: 10,
      },
    })));

    // Doctor entries expire after 24 hours.
    await redisConnection.set("doctor:" + doctorId, JSON.stringify({ url: req.body.url }), "EX", 86400);
    await redisConnection.set("doctor:" + doctorId + ":permutations", JSON.stringify(permutations), "EX", 86400);

    const protocol = process.env.ENV === "local" ? req.protocol : "https";

    res.json({ ok: true, id: doctorId, url: `${protocol}://${req.get("host")}/admin/${process.env.BULL_AUTH_KEY}/doctor/${doctorId}` });

    // await Promise.all(permutations.map(async perm => {
    //   try {
    //     const result = await scrapeURL(doctorId + ":bare", url, scrapeOptions.parse(perm.options), perm.internal);
    //     if (result.success) {
    //       results.push({
    //         state: "done",
    //         result,
    //         permutation: perm,
    //       });
    //     } else {
    //       results.push({
    //         state: "error",
    //         result,
    //         permutation: perm,
    //       });
    //     }
    //   } catch (error) {
    //     console.error("Permutation " + perm.name + " failed with error", { error });
    //     results.push({
    //       state: "thrownError",
    //       error,
    //       permutation: perm,
    //     });
    //   }
    // }));
  } catch (error) {
    logger.error("Doctor error", { error });
    res.status(500).json({ error: "Internal server error" });
  }
}
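
A minimal sketch of driving the two doctor endpoints end to end (the host and auth key are placeholder values; assumes Node 18+ for the global fetch):

// Placeholder values; substitute your deployment's host and BULL_AUTH_KEY.
const base = "http://localhost:3002";
const authKey = "CHANGEME";

const res = await fetch(`${base}/admin/${authKey}/doctor`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ url: "https://firecrawl.dev" }),
});
const { ok, id, url } = await res.json(); // shape per doctorController: { ok, id, url }
// `url` points at GET /admin/<key>/doctor/<id>, the HTML report; permutations
// whose scrape jobs are still queued render as ⏳, so poll or refresh it.
console.log(ok, id, url);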
apps/api/src/lib/logger.ts

@@ -1,4 +1,5 @@
 import * as winston from "winston";
+import Transport from "winston-transport";

 import { configDotenv } from "dotenv";
 configDotenv();

@@ -49,3 +50,33 @@ export const logger = winston.createLogger({
     }),
   ],
 });
+
+export type ArrayTransportOptions = Transport.TransportStreamOptions & {
+  array: any[];
+  scrapeId?: string;
+};
+
+export class ArrayTransport extends Transport {
+  private array: any[];
+  private scrapeId?: string;
+
+  constructor(opts: ArrayTransportOptions) {
+    super(opts);
+    this.array = opts.array;
+    this.scrapeId = opts.scrapeId;
+  }
+
+  log(info, next) {
+    setImmediate(() => {
+      this.emit("logged", info);
+    });
+
+    if (this.scrapeId !== undefined && info.scrapeId !== this.scrapeId) {
+      return next();
+    }
+
+    this.array.push(info);
+
+    next();
+  }
+}
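
A minimal sketch of using this transport, mirroring the buildMetaObject wiring shown further below (the scrapeId value is illustrative):

import { logger, ArrayTransport } from "./lib/logger"; // adjust the relative path

const logs: any[] = [];
// The child logger stamps scrapeId onto every entry. Because winston child
// loggers write through the parent's shared transports, ArrayTransport
// filters on scrapeId in log() so only this scrape's records land in the array.
const scrapeLogger = logger.child({ scrapeId: "job-123" });
scrapeLogger.add(new ArrayTransport({ array: logs, scrapeId: "job-123" }));

scrapeLogger.info("engine selected", { engine: "fetch" });
// logs[0] now holds the structured record for the line above.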
apps/api/src/main/runWebScraper.ts

@@ -70,7 +70,7 @@ export async function runWebScraper({
     }
   }

-  if(is_scrape === false) {
+  if(is_scrape === false && team_id) {
     let creditsToBeBilled = 1; // Assuming 1 credit per document
     if (scrapeOptions.extract) {
       creditsToBeBilled = 5;
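
The added team_id guard keeps nullable-team jobs out of billing; the doctor flow above enqueues team_id: null. A compact sketch of the narrowing involved (billTeam is a stand-in signature, not the repo's exact helper):

declare function billTeam(teamId: string, credits: number): Promise<void>;

async function maybeBill(team_id: string | null, creditsToBeBilled: number) {
  if (team_id) {
    await billTeam(team_id, creditsToBeBilled); // team_id narrowed to string here
  }
  // doctor jobs (team_id === null) skip billing entirely
}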
apps/api/src/routes/admin.ts

@@ -8,6 +8,8 @@ import {
 } from "../controllers/v0/admin/queue";
 import { wrap } from "./v1";
 import { acucCacheClearController } from "../controllers/v0/admin/acuc-cache-clear";
+import { doctorController } from "../controllers/v1/admin/doctor";
+import { doctorStatusController } from "../controllers/v1/admin/doctor-status";

 export const adminRouter = express.Router();

@@ -40,3 +42,13 @@ adminRouter.post(
   `/admin/${process.env.BULL_AUTH_KEY}/acuc-cache-clear`,
   wrap(acucCacheClearController),
 );
+
+adminRouter.post(
+  `/admin/${process.env.BULL_AUTH_KEY}/doctor`,
+  wrap(doctorController),
+);
+
+adminRouter.get(
+  `/admin/${process.env.BULL_AUTH_KEY}/doctor/:id`,
+  wrap(doctorStatusController),
+);
apps/api/src/scraper/scrapeURL/index.ts

@@ -2,7 +2,7 @@ import { Logger } from "winston";
 import * as Sentry from "@sentry/node";

 import { Document, ScrapeOptions } from "../../controllers/v1/types";
-import { logger } from "../../lib/logger";
+import { ArrayTransport, logger } from "../../lib/logger";
 import { buildFallbackList, Engine, EngineScrapeResult, FeatureFlag, scrapeURLWithEngine } from "./engines";
 import { parseMarkdown } from "../../lib/html-to-markdown";
 import { AddFeatureError, EngineError, NoEnginesLeftError, TimeoutError } from "./error";

@@ -97,6 +97,9 @@ function buildMetaObject(id: string, url: string, options: ScrapeOptions, intern

   const _logger = logger.child({ module: "ScrapeURL", scrapeId: id, scrapeURL: url });
+  const logs: any[] = [];
+  if (internalOptions.verbose) {
+    _logger.add(new ArrayTransport({ array: logs, scrapeId: id }));
+  }

   return {
     id, url, options, internalOptions,

@@ -114,6 +117,8 @@ export type InternalOptions = {
   v0CrawlOnlyUrls?: boolean;
   v0UseFastMode?: boolean;
   v0DisableJsDom?: boolean;
+
+  verbose?: boolean; // stores logs. will cause high memory usage. use with caution
 };

 export type EngineResultsTracker = { [E in Engine]?: ({

@@ -229,7 +234,7 @@ async function scrapeURLLoop(
       throw error;
     } else {
       Sentry.captureException(error);
-      meta.logger.info("An unexpected error happened while scraping with " + engine + ".", { error });
+      meta.logger.warn("An unexpected error happened while scraping with " + engine + ".", { error });
       results[engine] = {
         state: "error",
         error: safeguardCircularError(error),
apps/api/src/services/queue-worker.ts

@@ -38,6 +38,7 @@ import { configDotenv } from "dotenv";
 import { scrapeOptions } from "../controllers/v1/types";
 import { getRateLimiterPoints } from "./rate-limiter";
 import { cleanOldConcurrencyLimitEntries, pushConcurrencyLimitActiveJob, removeConcurrencyLimitActiveJob, takeConcurrencyLimitedJob } from "../lib/concurrency-limit";
+import { ScrapeUrlResponse } from "../scraper/scrapeURL";
 configDotenv();

 const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

@@ -289,17 +290,12 @@ async function processJob(job: Job & { id: string }, token: string) {
       ] : [])
   ]);

-  if (!pipeline.success) {
-    // TODO: let's Not do this
-    throw pipeline.error;
-  }
-
   const end = Date.now();
   const timeTakenInSeconds = (end - start) / 1000;

-  const doc = pipeline.document;
+  const doc = (pipeline as ScrapeUrlResponse & { success: true }).document;

-  const rawHtml = doc.rawHtml ?? "";
+  const rawHtml = doc?.rawHtml ?? "";

   const data = {
     success: true,

@@ -313,6 +309,16 @@ async function processJob(job: Job & { id: string }, token: string) {
     document: doc,
   };

+  if (job.data.doctor) {
+    (data.document as any) = pipeline as unknown as Document; // force it in there
+    return data;
+  }
+
+  if (!pipeline.success) {
+    // TODO: let's Not do this
+    throw pipeline.error;
+  }
+
   if (job.data.webhook && job.data.mode !== "crawl" && job.data.v1) {
     await callWebhook(
       job.data.team_id,
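
The cast plus optional chaining above is what lets doctor jobs flow through even when a scrape failed: the pipeline result is only asserted, not proven, to be a success, so document may be undefined. A compact sketch with simplified stand-in types (the real shapes live in scraper/scrapeURL):

// Simplified stand-ins for ScrapeUrlResponse's success/failure shapes.
type Ok = { success: true; document: { rawHtml?: string } };
type Err = { success: false; error: unknown };
type PipelineResult = Ok | Err;

function extractRawHtml(pipeline: PipelineResult): string {
  // Mirrors the worker: assert the success shape to reach `document`,
  // but keep `?.` because a failed pipeline carries no document.
  const doc = (pipeline as Ok).document;
  return doc?.rawHtml ?? "";
}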
apps/api/src/types.ts

@@ -29,8 +29,8 @@ export interface WebScraperOptions {
   crawlerOptions?: any;
   scrapeOptions: ScrapeOptions;
   internalOptions?: InternalOptions;
-  team_id: string;
-  plan: string;
+  team_id: string | null;
+  plan: string | null;
   origin?: string;
   crawl_id?: string;
   sitemapped?: boolean;

@@ -46,7 +46,7 @@ export interface RunWebScraperParams {
   internalOptions?: InternalOptions;
   // onSuccess: (result: V1Document, mode: string) => void;
   // onError: (error: Error) => void;
-  team_id: string;
+  team_id: string | null;
   bull_job_id: string;
   priority?: number;
   is_scrape?: boolean;