Mirror of https://github.com/mendableai/firecrawl.git (synced 2024-11-16 11:42:24 +08:00)

Compare commits: c40fdfea3c ... 18352bc153 (22 commits)

Commits (SHA1):
18352bc153, 3c1b1909f8, 9519897102, 7f084c6c43, e8bd089c8a, 3fcdf57d2f, d62f12c9d9, f155449458, 431e64e752, 7bca4486b4, df05124ef5, 86a78a03cb, 62c8b63b84, 5519f077aa, 0a1c99074f, bd928b1512, 0310cd2afa, 0d1c4e4e09, 32be2cf786, ea1302960f, 1a636b4e59, 5453539fb4
@@ -50,79 +50,79 @@
     "typescript": "^5.4.2"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.24.3",
+    "@anthropic-ai/sdk": "^0.32.1",
     "@brillout/import": "^0.2.2",
-    "@bull-board/api": "^5.20.5",
-    "@bull-board/express": "^5.20.5",
+    "@bull-board/api": "^6.4.0",
+    "@bull-board/express": "^6.4.0",
     "@devil7softwares/pos": "^1.0.2",
-    "@dqbd/tiktoken": "^1.0.16",
-    "@nangohq/node": "^0.40.8",
-    "@sentry/cli": "^2.33.1",
-    "@sentry/node": "^8.26.0",
-    "@sentry/profiling-node": "^8.26.0",
-    "@supabase/supabase-js": "^2.44.2",
-    "@types/express-ws": "^3.0.4",
-    "@types/ws": "^8.5.12",
-    "ajv": "^8.16.0",
-    "async": "^3.2.5",
+    "@dqbd/tiktoken": "^1.0.17",
+    "@nangohq/node": "^0.42.22",
+    "@sentry/cli": "^2.38.2",
+    "@sentry/node": "^8.38.0",
+    "@sentry/profiling-node": "^8.38.0",
+    "@supabase/supabase-js": "^2.46.1",
+    "@types/express-ws": "^3.0.5",
+    "@types/ws": "^8.5.13",
+    "ajv": "^8.17.1",
+    "async": "^3.2.6",
     "async-mutex": "^0.5.0",
-    "axios": "^1.3.4",
+    "axios": "^1.7.7",
     "axios-retry": "^4.5.0",
     "bottleneck": "^2.19.5",
-    "bullmq": "^5.11.0",
-    "cacheable-lookup": "^6.1.0",
-    "cheerio": "^1.0.0-rc.12",
+    "bullmq": "^5.25.6",
+    "cacheable-lookup": "^7.0.0",
+    "cheerio": "^1.0.0",
     "cohere": "^1.1.1",
     "cors": "^2.8.5",
     "cron-parser": "^4.9.0",
-    "date-fns": "^3.6.0",
+    "date-fns": "^4.1.0",
     "dotenv": "^16.3.1",
     "dotenv-cli": "^7.4.2",
     "escape-html": "^1.0.3",
-    "express-rate-limit": "^7.3.1",
+    "express-rate-limit": "^7.4.1",
     "express-ws": "^5.0.2",
-    "glob": "^10.4.2",
+    "glob": "^11.0.0",
     "gpt3-tokenizer": "^1.1.5",
     "ioredis": "^5.4.1",
     "joplin-turndown-plugin-gfm": "^1.0.12",
-    "json-schema-to-zod": "^2.3.0",
+    "json-schema-to-zod": "^2.4.1",
     "keyword-extractor": "^0.0.28",
-    "koffi": "^2.9.0",
-    "langchain": "^0.2.8",
+    "koffi": "^2.9.2",
+    "langchain": "^0.3.5",
     "languagedetect": "^2.0.0",
     "logsnag": "^1.0.0",
-    "luxon": "^3.4.3",
-    "marked": "^14.1.2",
+    "luxon": "^3.5.0",
+    "marked": "^15.0.0",
     "md5": "^2.3.0",
     "moment": "^2.29.4",
-    "mongoose": "^8.4.4",
-    "natural": "^7.0.7",
-    "openai": "^4.57.0",
+    "mongoose": "^8.8.1",
+    "natural": "^8.0.1",
+    "openai": "^4.72.0",
     "pdf-parse": "^1.1.1",
     "pos": "^0.4.2",
-    "posthog-node": "^4.0.1",
+    "posthog-node": "^4.2.1",
     "promptable": "^0.0.10",
-    "puppeteer": "^22.12.1",
-    "rate-limiter-flexible": "2.4.2",
+    "puppeteer": "^23.7.1",
+    "rate-limiter-flexible": "5.0.4",
     "redlock": "5.0.0-beta.2",
-    "resend": "^3.4.0",
+    "resend": "^4.0.0",
     "robots-parser": "^3.0.1",
-    "scrapingbee": "^1.7.4",
-    "stripe": "^16.1.0",
-    "systeminformation": "^5.22.11",
+    "scrapingbee": "^1.7.5",
+    "stripe": "^17.3.1",
+    "systeminformation": "^5.23.5",
     "turndown": "^7.1.3",
     "turndown-plugin-gfm": "^1.0.2",
     "typesense": "^1.5.4",
     "undici": "^6.20.1",
-    "unstructured-client": "^0.11.3",
-    "uuid": "^10.0.0",
-    "winston": "^3.14.2",
-    "winston-transport": "^4.8.0",
+    "unstructured-client": "^0.18.2",
+    "uuid": "^11.0.3",
+    "winston": "^3.17.0",
+    "winston-transport": "^4.9.0",
     "wordpos": "^2.1.0",
     "ws": "^8.18.0",
     "xml2js": "^0.6.2",
     "zod": "^3.23.8",
-    "zod-to-json-schema": "^3.23.1"
+    "zod-to-json-schema": "^3.23.5"
   },
   "nodemonConfig": {
     "ignore": [

File diff suppressed because it is too large.
@@ -75,7 +75,7 @@ export async function crawlController(req: Request, res: Response)
   await checkTeamCredits(chunk, team_id, limitCheck);

   if (!creditsCheckSuccess) {
-    return res.status(402).json({ error: "Insufficient credits. You may be requesting with a higher limit than the amount of credits you have left. If not, upgrade your plan at https://firecrawl.dev/pricing or contact us at hello@firecrawl.com" });
+    return res.status(402).json({ error: "Insufficient credits. You may be requesting with a higher limit than the amount of credits you have left. If not, upgrade your plan at https://firecrawl.dev/pricing or contact us at help@firecrawl.com" });
   }

   // TODO: need to do this to v1

@@ -209,7 +209,7 @@ export async function scrapeController(req: Request, res: Response)
     earlyReturn = true;
     return res.status(500).json({
       error:
-        "Error checking team credits. Please contact hello@firecrawl.com for help.",
+        "Error checking team credits. Please contact help@firecrawl.com for help.",
     });
   }

@@ -16,6 +16,7 @@ import { logCrawl } from "../../services/logging/crawl_log";
 import { getScrapeQueue } from "../../services/queue-service";
 import { getJobPriority } from "../../lib/job-priority";
 import { addScrapeJobs } from "../../services/queue-jobs";
+import { callWebhook } from "../../services/webhook";

 export async function batchScrapeController(
   req: RequestWithAuth<{}, CrawlResponse, BatchScrapeRequest>,

@@ -66,6 +67,7 @@
       crawl_id: id,
       sitemapped: true,
       v1: true,
+      webhook: req.body.webhook,
     },
     opts: {
       jobId: uuidv4(),

@@ -85,6 +87,10 @@
   );
   await addScrapeJobs(jobs);

+  if(req.body.webhook) {
+    await callWebhook(req.auth.team_id, id, null, req.body.webhook, true, "batch_scrape.started");
+  }
+
   const protocol = process.env.ENV === "local" ? req.protocol : "https";

   return res.status(200).json({
@@ -175,7 +175,7 @@ export async function crawlStatusWSController(ws: WebSocket, req: RequestWithAut
     logger.error("Error occurred in WebSocket! (" + req.path + ") -- ID " + id + " -- " + verbose);
     return close(ws, 1011, {
       type: "error",
-      error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. Your exception ID is " + id
+      error: "An unexpected error occurred. Please contact help@firecrawl.com for help. Your exception ID is " + id
     });
   }
 }
@@ -1,10 +1,6 @@
 import { Response } from "express";
 import { v4 as uuidv4 } from "uuid";
-import {
-  mapRequestSchema,
-  RequestWithAuth,
-  scrapeOptions,
-} from "./types";
+import { mapRequestSchema, RequestWithAuth, scrapeOptions } from "./types";
 import { crawlToCrawler, StoredCrawl } from "../../lib/crawl-redis";
 import { MapResponse, MapRequest } from "./types";
 import { configDotenv } from "dotenv";

@@ -46,6 +42,7 @@ export async function mapController(
     originUrl: req.body.url,
     crawlerOptions: {
       ...req.body,
+      limit: req.body.sitemapOnly ? 10000000 : limit,
       scrapeOptions: undefined,
     },
     scrapeOptions: scrapeOptions.parse({}),
@@ -57,77 +54,93 @@
   const crawler = crawlToCrawler(id, sc);

+  // If sitemapOnly is true, only get links from sitemap
+  if (req.body.sitemapOnly) {
+    const sitemap = await crawler.tryGetSitemap(true, true);
+    if (sitemap !== null) {
+      sitemap.forEach((x) => {
+        links.push(x.url);
+      });
+      links = links.slice(1, limit);
+    }
+  } else {
     let urlWithoutWww = req.body.url.replace("www.", "");

     let mapUrl = req.body.search
       ? `"${req.body.search}" site:${urlWithoutWww}`
       : `site:${req.body.url}`;

     const resultsPerPage = 100;
-    const maxPages = Math.ceil(Math.min(MAX_FIRE_ENGINE_RESULTS, limit) / resultsPerPage);
+    const maxPages = Math.ceil(
+      Math.min(MAX_FIRE_ENGINE_RESULTS, limit) / resultsPerPage
+    );

     const cacheKey = `fireEngineMap:${mapUrl}`;
     const cachedResult = null;

     let allResults: any[] = [];
     let pagePromises: Promise<any>[] = [];

     if (cachedResult) {
       allResults = JSON.parse(cachedResult);
     } else {
       const fetchPage = async (page: number) => {
         return fireEngineMap(mapUrl, {
           numResults: resultsPerPage,
           page: page,
         });
       };

-      pagePromises = Array.from({ length: maxPages }, (_, i) => fetchPage(i + 1));
+      pagePromises = Array.from({ length: maxPages }, (_, i) =>
+        fetchPage(i + 1)
+      );
       allResults = await Promise.all(pagePromises);

       await redis.set(cacheKey, JSON.stringify(allResults), "EX", 24 * 60 * 60); // Cache for 24 hours
     }

     // Parallelize sitemap fetch with serper search
     const [sitemap, ...searchResults] = await Promise.all([
-      req.body.ignoreSitemap ? null : crawler.tryGetSitemap(),
+      req.body.ignoreSitemap ? null : crawler.tryGetSitemap(true),
       ...(cachedResult ? [] : pagePromises),
     ]);

     if (!cachedResult) {
       allResults = searchResults;
     }

     if (sitemap !== null) {
       sitemap.forEach((x) => {
         links.push(x.url);
       });
     }

     let mapResults = allResults
       .flat()
       .filter((result) => result !== null && result !== undefined);

     const minumumCutoff = Math.min(MAX_MAP_LIMIT, limit);
     if (mapResults.length > minumumCutoff) {
       mapResults = mapResults.slice(0, minumumCutoff);
     }

     if (mapResults.length > 0) {
       if (req.body.search) {
         // Ensure all map results are first, maintaining their order
         links = [
           mapResults[0].url,
           ...mapResults.slice(1).map((x) => x.url),
           ...links,
         ];
       } else {
         mapResults.map((x) => {
           links.push(x.url);
         });
       }
     }
+  }

   // Perform cosine similarity between the search query and the list of links
   if (req.body.search) {
     const searchQuery = req.body.search.toLowerCase();
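Note: the new sitemapOnly branch above lets /v1/map return sitemap-sourced links without running the search path. A minimal usage sketch, not taken from this diff: the host, port, and API key are placeholders, and the response is assumed to follow the usual { success, links } shape.

// Hypothetical client-side call against a locally running API.
const res = await fetch("http://localhost:3002/v1/map", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer fc-YOUR-API-KEY", // placeholder key
  },
  body: JSON.stringify({
    url: "https://example.com",
    sitemapOnly: true, // new flag introduced in this change set
    limit: 100,
  }),
});
const { links } = await res.json(); // assumed response shape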
@@ -175,9 +175,21 @@ export const scrapeRequestSchema = scrapeOptions.extend({
 export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>;
 export type ScrapeRequestInput = z.input<typeof scrapeRequestSchema>;

+export const webhookSchema = z.preprocess(x => {
+  if (typeof x === "string") {
+    return { url: x };
+  } else {
+    return x;
+  }
+}, z.object({
+  url: z.string().url(),
+  headers: z.record(z.string(), z.string()).default({}),
+}).strict(strictMessage))
+
 export const batchScrapeRequestSchema = scrapeOptions.extend({
   urls: url.array(),
   origin: z.string().optional().default("api"),
+  webhook: webhookSchema.optional(),
 }).strict(strictMessage).refine(
   (obj) => {
     const hasExtractFormat = obj.formats?.includes("extract");
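Note: because of the z.preprocess above, a webhook can be supplied either as a bare URL string or as an object with url and headers, and both normalize to the same shape. A small standalone sketch of the same idea; strictMessage here is a stand-in, since the real constant lives elsewhere in types.ts.

import { z } from "zod";

const strictMessage = "Unrecognized key in body"; // stand-in for the real constant
const webhookSchema = z.preprocess(
  (x) => (typeof x === "string" ? { url: x } : x),
  z.object({
    url: z.string().url(),
    headers: z.record(z.string(), z.string()).default({}),
  }).strict(strictMessage)
);

// Both forms parse to the same normalized object:
webhookSchema.parse("https://example.com/hook");
// => { url: "https://example.com/hook", headers: {} }
webhookSchema.parse({ url: "https://example.com/hook", headers: { "X-Token": "abc" } });
// => { url: "https://example.com/hook", headers: { "X-Token": "abc" } }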
@@ -224,7 +236,7 @@ export const crawlRequestSchema = crawlerOptions.extend({
   url,
   origin: z.string().optional().default("api"),
   scrapeOptions: scrapeOptions.omit({ timeout: true }).default({}),
-  webhook: z.string().url().optional(),
+  webhook: webhookSchema.optional(),
   limit: z.number().default(10000),
 }).strict(strictMessage);

@@ -249,6 +261,7 @@ export const mapRequestSchema = crawlerOptions.extend({
   includeSubdomains: z.boolean().default(true),
   search: z.string().optional(),
   ignoreSitemap: z.boolean().default(false),
+  sitemapOnly: z.boolean().default(false),
   limit: z.number().min(1).max(5000).default(5000),
 }).strict(strictMessage);

@@ -207,7 +207,7 @@ app.use((err: unknown, req: Request<{}, ErrorResponse, undefined>, res: Response
   }

   logger.error("Error occurred in request! (" + req.path + ") -- ID " + id + " -- " + verbose);
-  res.status(500).json({ success: false, error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. Your exception ID is " + id });
+  res.status(500).json({ success: false, error: "An unexpected error occurred. Please contact help@firecrawl.com for help. Your exception ID is " + id });
 });

 logger.info(`Worker ${process.pid} started`);
@@ -166,10 +166,11 @@ export async function lockURLs(id: string, sc: StoredCrawl, urls: string[]): Pro
   return res;
 }

-export function crawlToCrawler(id: string, sc: StoredCrawl): WebCrawler {
+export function crawlToCrawler(id: string, sc: StoredCrawl, newBase?: string): WebCrawler {
   const crawler = new WebCrawler({
     jobId: id,
     initialUrl: sc.originUrl!,
+    baseUrl: newBase ? new URL(newBase).origin : undefined,
     includes: sc.crawlerOptions?.includes ?? [],
     excludes: sc.crawlerOptions?.excludes ?? [],
     maxCrawledLinks: sc.crawlerOptions?.maxCrawledLinks ?? 1000,
@@ -6,22 +6,28 @@ import * as Sentry from "@sentry/node";

 import dotenv from 'dotenv';
 import { logger } from './logger';
+import { stat } from 'fs/promises';
 dotenv.config();

 // TODO: add a timeout to the Go parser
+const goExecutablePath = join(process.cwd(), 'sharedLibs', 'go-html-to-md', 'html-to-markdown.so');

 class GoMarkdownConverter {
   private static instance: GoMarkdownConverter;
   private convert: any;

   private constructor() {
-    const goExecutablePath = join(process.cwd(), 'sharedLibs', 'go-html-to-md', 'html-to-markdown.so');
     const lib = koffi.load(goExecutablePath);
     this.convert = lib.func('ConvertHTMLToMarkdown', 'string', ['string']);
   }

-  public static getInstance(): GoMarkdownConverter {
+  public static async getInstance(): Promise<GoMarkdownConverter> {
     if (!GoMarkdownConverter.instance) {
+      try {
+        await stat(goExecutablePath);
+      } catch (_) {
+        throw Error("Go shared library not found");
+      }
       GoMarkdownConverter.instance = new GoMarkdownConverter();
     }
     return GoMarkdownConverter.instance;

@@ -47,7 +53,7 @@ export async function parseMarkdown(html: string | null | undefined): Promise<st

   try {
     if (process.env.USE_GO_MARKDOWN_PARSER == "true") {
-      const converter = GoMarkdownConverter.getInstance();
+      const converter = await GoMarkdownConverter.getInstance();
       let markdownContent = await converter.convertHTMLToMarkdown(html);

       markdownContent = processMultiLineLinks(markdownContent);

@@ -56,8 +62,12 @@ export async function parseMarkdown(html: string | null | undefined): Promise<st
       return markdownContent;
     }
   } catch (error) {
-    Sentry.captureException(error);
-    logger.error(`Error converting HTML to Markdown with Go parser: ${error}`);
+    if (!(error instanceof Error) || error.message !== "Go shared library not found") {
+      Sentry.captureException(error);
+      logger.error(`Error converting HTML to Markdown with Go parser: ${error}`);
+    } else {
+      logger.warn("Tried to use Go parser, but it doesn't exist in the file system.", { goExecutablePath });
+    }
   }

   // Fallback to TurndownService if Go parser fails or is not enabled

@@ -89,7 +99,7 @@ export async function parseMarkdown(html: string | null | undefined): Promise<st

     return markdownContent;
   } catch (error) {
-    console.error("Error converting HTML to Markdown: ", error);
+    logger.error("Error converting HTML to Markdown", {error});
     return ""; // Optionally return an empty string or handle the error as needed
   }
 }
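Note: the getInstance change above checks that the shared library exists before koffi tries to load it, so a missing .so now falls through to the Turndown fallback instead of being reported to Sentry as an error. A minimal standalone sketch of that guard, using the same path and exported Go function as the diff but outside the singleton class:

import { join } from "path";
import { stat } from "fs/promises";
import koffi from "koffi";

const goExecutablePath = join(process.cwd(), "sharedLibs", "go-html-to-md", "html-to-markdown.so");

async function loadGoConverter() {
  try {
    await stat(goExecutablePath); // throws if the shared library is absent
  } catch {
    throw new Error("Go shared library not found"); // caller can treat this as "use the JS fallback"
  }
  const lib = koffi.load(goExecutablePath);
  return lib.func("ConvertHTMLToMarkdown", "string", ["string"]);
}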
@@ -27,6 +27,7 @@ export class WebCrawler {
   constructor({
     jobId,
     initialUrl,
+    baseUrl,
     includes,
     excludes,
     maxCrawledLinks = 10000,

@@ -38,6 +39,7 @@
   }: {
     jobId: string;
     initialUrl: string;
+    baseUrl?: string;
     includes?: string[];
     excludes?: string[];
     maxCrawledLinks?: number;

@@ -49,7 +51,7 @@
   }) {
     this.jobId = jobId;
     this.initialUrl = initialUrl;
-    this.baseUrl = new URL(initialUrl).origin;
+    this.baseUrl = baseUrl ?? new URL(initialUrl).origin;
     this.includes = Array.isArray(includes) ? includes : [];
     this.excludes = Array.isArray(excludes) ? excludes : [];
     this.limit = limit;

@@ -63,7 +65,12 @@
     this.allowExternalContentLinks = allowExternalContentLinks ?? false;
   }

-  public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
+  public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number, fromMap: boolean = false): string[] {
+    // If the initial URL is a sitemap.xml, skip filtering
+    if (this.initialUrl.endsWith('sitemap.xml') && fromMap) {
+      return sitemapLinks.slice(0, limit);
+    }
+
     return sitemapLinks
       .filter((link) => {
         let url: URL;

@@ -157,11 +164,14 @@
     this.robots = robotsParser(this.robotsTxtUrl, txt);
   }

-  public async tryGetSitemap(): Promise<{ url: string; html: string; }[] | null> {
+  public async tryGetSitemap(fromMap: boolean = false, onlySitemap: boolean = false): Promise<{ url: string; html: string; }[] | null> {
     logger.debug(`Fetching sitemap links from ${this.initialUrl}`);
     const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl);
+    if(fromMap && onlySitemap) {
+      return sitemapLinks.map(link => ({ url: link, html: "" }));
+    }
     if (sitemapLinks.length > 0) {
-      let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth);
+      let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth, fromMap);
       return filteredLinks.map(link => ({ url: link, html: "" }));
     }
     return null;
@@ -351,6 +361,7 @@
     return url;
   };

+
   const sitemapUrl = url.endsWith("/sitemap.xml")
     ? url
     : `${url}/sitemap.xml`;

@@ -24,7 +24,7 @@ export async function getLinksFromSitemap(
     const response = await axios.get(sitemapUrl, { timeout: axiosTimeout });
     content = response.data;
   } else if (mode === 'fire-engine') {
-    const response = await scrapeURL("sitemap", sitemapUrl, scrapeOptions.parse({ formats: ["rawHtml"] }), { forceEngine: "fire-engine;playwright" });;
+    const response = await scrapeURL("sitemap", sitemapUrl, scrapeOptions.parse({ formats: ["rawHtml"] }), { forceEngine: "fire-engine;tlsclient", v0DisableJsDom: true });
     if (!response.success) {
       throw response.error;
     }
@@ -18,7 +18,7 @@ export class NoEnginesLeftError extends Error {
   public results: EngineResultsTracker;

   constructor(fallbackList: Engine[], results: EngineResultsTracker) {
-    super("All scraping engines failed!");
+    super("All scraping engines failed! -- Double check the URL to make sure it's not broken. If the issue persists, contact us at help@firecrawl.com.");
     this.fallbackList = fallbackList;
     this.results = results;
   }
@@ -1,4 +1,3 @@
-import axios from "axios";
 import dotenv from "dotenv";
 import { SearchResult } from "../../src/lib/entities";
 import * as Sentry from "@sentry/node";

@@ -6,7 +5,6 @@ import { logger } from "../lib/logger";

 dotenv.config();

-
 export async function fireEngineMap(
   q: string,
   options: {

@@ -37,18 +35,18 @@
     return [];
   }

-  let config = {
+  const response = await fetch(`${process.env.FIRE_ENGINE_BETA_URL}/search`, {
     method: "POST",
-    url: `${process.env.FIRE_ENGINE_BETA_URL}/search`,
     headers: {
       "Content-Type": "application/json",
-      "X-Disable-Cache": "true"
+      "X-Disable-Cache": "true",
     },
-    data: data,
-  };
-  const response = await axios(config);
-  if (response && response.data) {
-    return response.data;
+    body: data,
+  });
+
+  if (response.ok) {
+    const responseData = await response.json();
+    return responseData;
   } else {
     return [];
   }
@@ -6,6 +6,7 @@ import { logger } from "../../../src/lib/logger";
 import { sendSlackWebhook } from "../alerts/slack";
 import { getNotificationString } from "./notification_string";
 import { AuthCreditUsageChunk } from "../../controllers/v1/types";
+import { redlock } from "../redlock";

 const emailTemplates: Record<
   NotificationType,

@@ -22,7 +23,7 @@
   },
   [NotificationType.RATE_LIMIT_REACHED]: {
     subject: "Rate Limit Reached - Firecrawl",
-    html: "Hey there,<br/><p>You've hit one of the Firecrawl endpoint's rate limit! Take a breather and try again in a few moments. If you need higher rate limits, consider upgrading your plan. Check out our <a href='https://firecrawl.dev/pricing'>pricing page</a> for more info.</p><p>If you have any questions, feel free to reach out to us at <a href='mailto:hello@firecrawl.com'>hello@firecrawl.com</a></p><br/>Thanks,<br/>Firecrawl Team<br/><br/>Ps. this email is only sent once every 7 days if you reach a rate limit.",
+    html: "Hey there,<br/><p>You've hit one of the Firecrawl endpoint's rate limit! Take a breather and try again in a few moments. If you need higher rate limits, consider upgrading your plan. Check out our <a href='https://firecrawl.dev/pricing'>pricing page</a> for more info.</p><p>If you have any questions, feel free to reach out to us at <a href='mailto:help@firecrawl.com'>help@firecrawl.com</a></p><br/>Thanks,<br/>Firecrawl Team<br/><br/>Ps. this email is only sent once every 7 days if you reach a rate limit.",
   },
   [NotificationType.AUTO_RECHARGE_SUCCESS]: {
     subject: "Auto recharge successful - Firecrawl",

@@ -30,7 +31,7 @@
   },
   [NotificationType.AUTO_RECHARGE_FAILED]: {
     subject: "Auto recharge failed - Firecrawl",
-    html: "Hey there,<br/><p>Your auto recharge failed. Please try again manually. If the issue persists, please reach out to us at <a href='mailto:hello@firecrawl.com'>hello@firecrawl.com</a></p><br/>Thanks,<br/>Firecrawl Team<br/>",
+    html: "Hey there,<br/><p>Your auto recharge failed. Please try again manually. If the issue persists, please reach out to us at <a href='mailto:help@firecrawl.com'>help@firecrawl.com</a></p><br/>Thanks,<br/>Firecrawl Team<br/>",
   },
 };

@@ -62,7 +63,7 @@ export async function sendEmailNotification(
   const { data, error } = await resend.emails.send({
     from: "Firecrawl <firecrawl@getmendableai.com>",
     to: [email],
-    reply_to: "hello@firecrawl.com",
+    reply_to: "help@firecrawl.com",
     subject: emailTemplates[notificationType].subject,
     html: emailTemplates[notificationType].html,
   });

@@ -88,6 +89,7 @@ export async function sendNotificationInternal(
   if (team_id === "preview") {
     return { success: true };
   }
+  return await redlock.using([`notification-lock:${team_id}:${notificationType}`], 5000, async () => {

   if (!bypassRecentChecks) {
     const fifteenDaysAgo = new Date();

@@ -171,5 +173,6 @@
     return { success: false };
   }

-  return { success: true };
+    return { success: true };
+  });
 }
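Note: sendNotificationInternal's body is now wrapped in redlock.using (see the hunks above), so concurrent workers checking the same team and notification type serialize on one lock instead of double-sending. A minimal sketch of that locking pattern, assuming a configured Redlock v5 instance like the one exported from ../redlock:

import { redlock } from "../redlock"; // existing export, per the diff

async function withNotificationLock(teamId: string, notificationType: string) {
  // Hold the lock for up to 5 seconds while deciding whether to send.
  return await redlock.using(
    [`notification-lock:${teamId}:${notificationType}`],
    5000,
    async () => {
      // ...recent-notification checks and the actual email/Slack send go here...
      return { success: true };
    }
  );
}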
@@ -262,7 +262,7 @@ async function processJob(job: Job & { id: string }, token: string) {
         document: null,
         project_id: job.data.project_id,
         error:
-          "URL is blocked. Suspecious activity detected. Please contact hello@firecrawl.com if you believe this is an error.",
+          "URL is blocked. Suspecious activity detected. Please contact help@firecrawl.com if you believe this is an error.",
       };
       return data;
     }

@@ -352,7 +352,7 @@ async function processJob(job: Job & { id: string }, token: string) {

     if (!job.data.sitemapped && job.data.crawlerOptions !== null) {
       if (!sc.cancelled) {
-        const crawler = crawlToCrawler(job.data.crawl_id, sc);
+        const crawler = crawlToCrawler(job.data.crawl_id, sc, doc.metadata.url ?? doc.metadata.sourceURL ?? sc.originUrl);

         const links = crawler.filterLinks(
           crawler.extractLinksFromHTML(rawHtml ?? "", doc.metadata?.url ?? doc.metadata?.sourceURL ?? sc.originUrl as string),
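Note: passing the scraped document's final URL into crawlToCrawler (above) lets the crawler re-anchor its baseUrl after a redirect; new URL(x).origin keeps only the scheme and host. A tiny sketch of what that override does, with made-up URLs:

// The crawl started at https://example.com, but the first page redirected to the www host.
const initialUrl = "https://example.com";
const redirectedDocUrl = "https://www.example.com/home";

const baseUrl = redirectedDocUrl
  ? new URL(redirectedDocUrl).origin // "https://www.example.com"
  : new URL(initialUrl).origin;      // fallback: "https://example.com"
console.log(baseUrl); // "https://www.example.com"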
@@ -1,15 +1,17 @@
 import axios from "axios";
-import { logger } from "../../src/lib/logger";
+import { logger } from "../lib/logger";
 import { supabase_service } from "./supabase";
 import { WebhookEventType } from "../types";
 import { configDotenv } from "dotenv";
+import { z } from "zod";
+import { webhookSchema } from "../controllers/v1/types";
 configDotenv();

 export const callWebhook = async (
   teamId: string,
   id: string,
   data: any | null,
-  specified?: string,
+  specified?: z.infer<typeof webhookSchema>,
   v1 = false,
   eventType: WebhookEventType = "crawl.page",
   awaitWebhook: boolean = false

@@ -20,7 +22,7 @@ export const callWebhook = async (
     id
   );
   const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === "true";
-  let webhookUrl = specified ?? selfHostedUrl;
+  let webhookUrl = specified ?? (selfHostedUrl ? webhookSchema.parse({ url: selfHostedUrl }) : undefined);

   // Only fetch the webhook URL from the database if the self-hosted webhook URL and specified webhook are not set
   // and the USE_DB_AUTHENTICATION environment variable is set to true

@@ -73,7 +75,7 @@ export const callWebhook = async (
     if (awaitWebhook) {
       try {
         await axios.post(
-          webhookUrl,
+          webhookUrl.url,
           {
             success: !v1
               ? data.success

@@ -92,6 +94,7 @@ export const callWebhook = async (
           {
             headers: {
               "Content-Type": "application/json",
+              ...webhookUrl.headers,
             },
             timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1)
           }

@@ -104,7 +107,7 @@ export const callWebhook = async (
     } else {
       axios
         .post(
-          webhookUrl,
+          webhookUrl.url,
           {
             success: !v1
               ? data.success

@@ -123,6 +126,7 @@ export const callWebhook = async (
           {
             headers: {
               "Content-Type": "application/json",
+              ...webhookUrl.headers,
             },
             timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1)
           }
@@ -1,4 +1,5 @@
-import { AuthCreditUsageChunk, ScrapeOptions, Document as V1Document } from "./controllers/v1/types";
+import { z } from "zod";
+import { AuthCreditUsageChunk, ScrapeOptions, Document as V1Document, webhookSchema } from "./controllers/v1/types";
 import { ExtractorOptions, Document } from "./lib/entities";
 import { InternalOptions } from "./scraper/scrapeURL";

@@ -33,7 +34,7 @@ export interface WebScraperOptions {
   origin?: string;
   crawl_id?: string;
   sitemapped?: boolean;
-  webhook?: string;
+  webhook?: z.infer<typeof webhookSchema>;
   v1?: boolean;
   is_scrape?: boolean;
 }

@@ -165,4 +166,4 @@ export type PlanType =
   | "";


-export type WebhookEventType = "crawl.page" | "batch_scrape.page" | "crawl.started" | "crawl.completed" | "batch_scrape.completed" | "crawl.failed";
+export type WebhookEventType = "crawl.page" | "batch_scrape.page" | "crawl.started" | "batch_scrape.started" | "crawl.completed" | "batch_scrape.completed" | "crawl.failed";
@@ -1,6 +1,6 @@
 {
   "name": "@mendable/firecrawl-js",
-  "version": "1.8.1",
+  "version": "1.8.4",
   "description": "JavaScript SDK for Firecrawl API",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -153,7 +153,10 @@ export interface CrawlParams {
   allowExternalLinks?: boolean;
   ignoreSitemap?: boolean;
   scrapeOptions?: CrawlScrapeOptions;
-  webhook?: string;
+  webhook?: string | {
+    url: string;
+    headers?: Record<string, string>;
+  };
   deduplicateSimilarURLs?: boolean;
   ignoreQueryParameters?: boolean;
 }
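Note: with the widened CrawlParams type above, the JS SDK accepts either webhook form. A hedged usage sketch; the crawlUrl method is the SDK's existing crawl entry point, and the API key, URLs, and header value are placeholders:

import FirecrawlApp from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" }); // placeholder key

// Plain string form:
await app.crawlUrl("https://example.com", { webhook: "https://example.com/hook" });

// Object form, so webhook deliveries carry custom headers:
await app.crawlUrl("https://example.com", {
  webhook: {
    url: "https://example.com/hook",
    headers: { Authorization: "Bearer my-webhook-secret" }, // placeholder secret
  },
});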
@@ -218,6 +221,7 @@ export interface MapParams {
   search?: string;
   ignoreSitemap?: boolean;
   includeSubdomains?: boolean;
+  sitemapOnly?: boolean;
   limit?: number;
 }

@@ -540,16 +544,18 @@ export default class FirecrawlApp {
    * @param params - Additional parameters for the scrape request.
    * @param pollInterval - Time in seconds for job status checks.
    * @param idempotencyKey - Optional idempotency key for the request.
+   * @param webhook - Optional webhook for the batch scrape.
    * @returns The response from the crawl operation.
    */
   async batchScrapeUrls(
     urls: string[],
     params?: ScrapeParams,
     pollInterval: number = 2,
-    idempotencyKey?: string
+    idempotencyKey?: string,
+    webhook?: CrawlParams["webhook"],
   ): Promise<BatchScrapeStatusResponse | ErrorResponse> {
     const headers = this.prepareHeaders(idempotencyKey);
-    let jsonData: any = { urls, ...(params ?? {}) };
+    let jsonData: any = { urls, ...(params ?? {}), webhook };
     try {
       const response: AxiosResponse = await this.postRequest(
         this.apiUrl + `/v1/batch/scrape`,
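Note: the new webhook argument above is merged into the /v1/batch/scrape request body. A hedged call sketch matching the updated signature; the API key and URLs are placeholders:

import FirecrawlApp from "@mendable/firecrawl-js";

const app = new FirecrawlApp({ apiKey: "fc-YOUR-API-KEY" }); // placeholder key

const result = await app.batchScrapeUrls(
  ["https://example.com/a", "https://example.com/b"],
  { formats: ["markdown"] },
  2,         // pollInterval, in seconds
  undefined, // idempotencyKey
  { url: "https://example.com/hook", headers: {} } // webhook, per the new parameter
);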
examples/aginews-ai-newsletter/README.md (new file, 6 lines)

@@ -0,0 +1,6 @@
+# AGI News ✨
+AGI News is a daily AI newsletter that's completely sourced by autonomous AI agents. It is live at [https://www.aginews.io/](https://www.aginews.io/)
+
+Here is a link to the repo:
+
+[https://github.com/ericciarla/aginews](https://github.com/ericciarla/aginews)