Merge pull request #269 from mendableai/feat/allowbackwardcrawling-option

[Feat] Added allowBackwardCrawling option
Rafael Miller 2024-06-12 11:34:39 -03:00 committed by GitHub
commit 48f6c19a05
5 changed files with 81 additions and 16 deletions
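
The new crawlerOptions.allowBackwardCrawling flag lets a crawl that starts at a sub-path (e.g. https://mendable.ai/blog) follow links back to pages outside that path on the same host, instead of being restricted to URLs under the initial path. A minimal sketch of a request enabling it, using the /v0/crawl endpoint and payload shape exercised in the tests below (the fetch-based client, the api.firecrawl.dev base URL, and the FIRECRAWL_API_KEY variable are illustrative assumptions, not part of this change):

    // Illustrative sketch: start a crawl at a sub-path and allow it to walk
    // back up to the rest of the same domain.
    const res = await fetch("https://api.firecrawl.dev/v0/crawl", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}`,
      },
      body: JSON.stringify({
        url: "https://mendable.ai/blog",
        pageOptions: { includeHtml: true },
        crawlerOptions: { allowBackwardCrawling: true },
      }),
    });
    const { jobId } = await res.json(); // poll /v0/crawl/status/{jobId} as the tests below do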


@@ -596,7 +596,7 @@ describe("E2E Tests for API Routes", () => {
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
-       .send({ url: "https://roastmywebsite.ai" });
+       .send({ url: "https://mendable.ai/blog" });
      expect(crawlResponse.statusCode).toBe(200);

      let isCompleted = false;
@@ -622,7 +622,13 @@ describe("E2E Tests for API Routes", () => {
      expect(completedResponse.body.data[0]).toHaveProperty("content");
      expect(completedResponse.body.data[0]).toHaveProperty("markdown");
      expect(completedResponse.body.data[0]).toHaveProperty("metadata");
-     expect(completedResponse.body.data[0].content).toContain("_Roast_");
+     expect(completedResponse.body.data[0].content).toContain("Mendable");
+
+     const childrenLinks = completedResponse.body.data.filter(doc =>
+       doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
+     );
+
+     expect(childrenLinks.length).toBe(completedResponse.body.data.length);
    }, 120000); // 120 seconds

    it.concurrent('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension', async () => {
@@ -757,34 +763,82 @@ describe("E2E Tests for API Routes", () => {
      }, 60000);
    }); // 60 seconds

+   it.concurrent("should return a successful response for a valid crawl job with allowBackwardCrawling set to true option", async () => {
+     const crawlResponse = await request(TEST_URL)
+       .post("/v0/crawl")
+       .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+       .set("Content-Type", "application/json")
+       .send({
+         url: "https://mendable.ai/blog",
+         pageOptions: { includeHtml: true },
+         crawlerOptions: { allowBackwardCrawling: true },
+       });
+     expect(crawlResponse.statusCode).toBe(200);
+
+     let isFinished = false;
+     let completedResponse;
+
+     while (!isFinished) {
+       const response = await request(TEST_URL)
+         .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
+         .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
+
+       expect(response.statusCode).toBe(200);
+       expect(response.body).toHaveProperty("status");
+
+       if (response.body.status === "completed") {
+         isFinished = true;
+         completedResponse = response;
+       } else {
+         await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again
+       }
+     }
+
+     expect(completedResponse.statusCode).toBe(200);
+     expect(completedResponse.body).toHaveProperty("status");
+     expect(completedResponse.body.status).toBe("completed");
+     expect(completedResponse.body).toHaveProperty("data");
+     expect(completedResponse.body.data[0]).toHaveProperty("content");
+     expect(completedResponse.body.data[0]).toHaveProperty("markdown");
+     expect(completedResponse.body.data[0]).toHaveProperty("metadata");
+     expect(completedResponse.body.data[0]).toHaveProperty("html");
+     expect(completedResponse.body.data[0].content).toContain("Mendable");
+     expect(completedResponse.body.data[0].markdown).toContain("Mendable");
+
+     const onlyChildrenLinks = completedResponse.body.data.filter(doc => {
+       return doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog")
+     });
+
+     expect(completedResponse.body.data.length).toBeGreaterThan(onlyChildrenLinks.length);
+   }, 60000);
+
    it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => {
      const crawlResponse = await request(TEST_URL)
        .post("/v0/crawl")
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
        .set("Content-Type", "application/json")
        .send({ url: "https://jestjs.io" });
      expect(crawlResponse.statusCode).toBe(200);

-     // wait for 30 seconds
      await new Promise((r) => setTimeout(r, 20000));

-     const response = await request(TEST_URL)
+     const responseCancel = await request(TEST_URL)
        .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`)
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);
-     expect(response.statusCode).toBe(200);
-     expect(response.body).toHaveProperty("status");
-     expect(response.body.status).toBe("cancelled");
+     expect(responseCancel.statusCode).toBe(200);
+     expect(responseCancel.body).toHaveProperty("status");
+     expect(responseCancel.body.status).toBe("cancelled");

      await new Promise((r) => setTimeout(r, 10000));

      const completedResponse = await request(TEST_URL)
        .get(`/v0/crawl/status/${crawlResponse.body.jobId}`)
        .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`);

      expect(completedResponse.statusCode).toBe(200);
      expect(completedResponse.body).toHaveProperty("status");
      expect(completedResponse.body.status).toBe("failed");
      expect(completedResponse.body).toHaveProperty("data");
-     expect(completedResponse.body.data).toEqual(null);
+     expect(completedResponse.body.data).toBeNull();
      expect(completedResponse.body).toHaveProperty("partial_data");
      expect(completedResponse.body.partial_data[0]).toHaveProperty("content");
      expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown");


@@ -55,7 +55,7 @@ export async function crawlController(req: Request, res: Response) {
    }

    const mode = req.body.mode ?? "crawl";
-   const crawlerOptions = req.body.crawlerOptions ?? {};
+   const crawlerOptions = req.body.crawlerOptions ?? { allowBackwardCrawling: false };
    const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false };

    if (mode === "single_urls" && !url.includes(",")) {
@@ -64,9 +64,7 @@ export async function crawlController(req: Request, res: Response) {
      await a.setOptions({
        mode: "single_urls",
        urls: [url],
-       crawlerOptions: {
-         returnOnlyUrls: true,
-       },
+       crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true },
        pageOptions: pageOptions,
      });
@@ -91,7 +89,7 @@ export async function crawlController(req: Request, res: Response) {
    const job = await addWebScraperJob({
      url: url,
      mode: mode ?? "crawl", // fix for single urls not working
-     crawlerOptions: { ...crawlerOptions },
+     crawlerOptions: crawlerOptions,
      team_id: team_id,
      pageOptions: pageOptions,
      origin: req.body.origin ?? "api",
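
One detail worth noting about the controller change above: the ?? { allowBackwardCrawling: false } default only applies when crawlerOptions is missing from the request body entirely. If a caller sends crawlerOptions without the flag, it stays undefined at this level and is defaulted to false further down by the ?? false fallbacks in WebCrawler and WebScraperDataProvider. A small self-contained sketch of that behavior (the resolveCrawlerOptions helper is hypothetical, written only to illustrate the nullish-coalescing semantics):

    // Hypothetical helper mirroring the controller's defaulting expression.
    type CrawlerOptionsSketch = { allowBackwardCrawling?: boolean; [key: string]: unknown };

    function resolveCrawlerOptions(body: { crawlerOptions?: CrawlerOptionsSketch }): CrawlerOptionsSketch {
      // Default only kicks in when crawlerOptions is absent from the body.
      return body.crawlerOptions ?? { allowBackwardCrawling: false };
    }

    resolveCrawlerOptions({});                                     // { allowBackwardCrawling: false }
    resolveCrawlerOptions({ crawlerOptions: { limit: 5 } });       // { limit: 5 } -- flag undefined here,
                                                                   // later coerced to false via `?? false`
    resolveCrawlerOptions({ crawlerOptions: { allowBackwardCrawling: true } }); // flag enabled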


@@ -47,6 +47,7 @@ export type CrawlerOptions = {
    replaceAllPathsWithAbsolutePaths?: boolean;
    ignoreSitemap?: boolean;
    mode?: "default" | "fast"; // have a mode of some sort
+   allowBackwardCrawling?: boolean;
  }

  export type WebScraperOptions = {
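
For reference, a CrawlerOptions value exercising the new field could look like the following (only fields visible in this diff are used; the object literal itself is illustrative):

    const crawlerOptions: CrawlerOptions = {
      ignoreSitemap: false,
      mode: "default",
      allowBackwardCrawling: true, // follow links outside the initial URL's path, same host only
    };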


@@ -20,6 +20,7 @@ export class WebCrawler {
    private robotsTxtUrl: string;
    private robots: any;
    private generateImgAltText: boolean;
+   private allowBackwardCrawling: boolean;

    constructor({
      initialUrl,
@@ -29,6 +30,7 @@ export class WebCrawler {
      limit = 10000,
      generateImgAltText = false,
      maxCrawledDepth = 10,
+     allowBackwardCrawling = false
    }: {
      initialUrl: string;
      includes?: string[];
@@ -37,6 +39,7 @@ export class WebCrawler {
      limit?: number;
      generateImgAltText?: boolean;
      maxCrawledDepth?: number;
+     allowBackwardCrawling?: boolean;
    }) {
      this.initialUrl = initialUrl;
      this.baseUrl = new URL(initialUrl).origin;
@@ -49,6 +52,7 @@ export class WebCrawler {
      this.maxCrawledLinks = maxCrawledLinks ?? limit;
      this.maxCrawledDepth = maxCrawledDepth ?? 10;
      this.generateImgAltText = generateImgAltText ?? false;
+     this.allowBackwardCrawling = allowBackwardCrawling ?? false;
    }

    private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] {
@@ -90,10 +94,16 @@ export class WebCrawler {
        const linkHostname = normalizedLink.hostname.replace(/^www\./, '');

        // Ensure the protocol and hostname match, and the path starts with the initial URL's path
-       if (linkHostname !== initialHostname || !normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+       if (linkHostname !== initialHostname) {
          return false;
        }

+       if (!this.allowBackwardCrawling) {
+         if (!normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname)) {
+           return false;
+         }
+       }
+
        const isAllowed = this.robots.isAllowed(link, "FireCrawlAgent") ?? true;
        // Check if the link is disallowed by robots.txt
        if (!isAllowed) {
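
The effect of the change above, in isolation: the hostname check still always applies, but the path-prefix check is now skipped when allowBackwardCrawling is true, so same-host links outside the starting path survive this filter (robots.txt and the other filters still run afterwards). A stand-alone sketch of the check, not the actual WebCrawler method:

    // Illustrative stand-alone version of the path check shown in the diff.
    function passesPathCheck(
      link: string,
      initialUrl: string,
      allowBackwardCrawling: boolean
    ): boolean {
      const normalizedLink = new URL(link);
      const normalizedInitialUrl = new URL(initialUrl);
      const linkHostname = normalizedLink.hostname.replace(/^www\./, "");
      const initialHostname = normalizedInitialUrl.hostname.replace(/^www\./, "");

      // The hostname must always match.
      if (linkHostname !== initialHostname) return false;

      // Only enforce the path-prefix rule when backward crawling is disabled.
      if (!allowBackwardCrawling) {
        return normalizedLink.pathname.startsWith(normalizedInitialUrl.pathname);
      }
      return true;
    }

    // passesPathCheck("https://mendable.ai/pricing", "https://mendable.ai/blog", false) === false
    // passesPathCheck("https://mendable.ai/pricing", "https://mendable.ai/blog", true)  === true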


@@ -38,8 +38,8 @@ export class WebScraperDataProvider {
    private generateImgAltTextModel: "gpt-4-turbo" | "claude-3-opus" =
      "gpt-4-turbo";
    private crawlerMode: string = "default";
+   private allowBackwardCrawling: boolean = false;

    authorize(): void {
      throw new Error("Method not implemented.");
    }
@@ -171,6 +171,7 @@ export class WebScraperDataProvider {
      maxCrawledDepth: this.maxCrawledDepth,
      limit: this.limit,
      generateImgAltText: this.generateImgAltText,
+     allowBackwardCrawling: this.allowBackwardCrawling,
    });

    let links = await crawler.start(
@@ -481,6 +482,7 @@ export class WebScraperDataProvider {
    this.excludes = this.excludes.filter((item) => item !== "");
    this.crawlerMode = options.crawlerOptions?.mode ?? "default";
    this.ignoreSitemap = options.crawlerOptions?.ignoreSitemap ?? false;
+   this.allowBackwardCrawling = options.crawlerOptions?.allowBackwardCrawling ?? false;

    // make sure all urls start with https://
    this.urls = this.urls.map((url) => {