mirror of https://github.com/mendableai/firecrawl.git
synced 2024-11-16 19:58:08 +08:00

feat(scrapeURL): more intense tests

This commit is contained in:
parent 29ca8ce845
commit 33555742a7
@@ -2,232 +2,258 @@ import "dotenv/config";
 import { scrapeURL } from ".";
 import { scrapeOptions } from "../../controllers/v1/types";
+import { Engine } from "./engines";
 
+const testEngines: (Engine | undefined)[] = [
+  undefined,
+  "fire-engine;chrome-cdp",
+  "fire-engine;playwright",
+  "fire-engine;tlsclient",
+  "scrapingbee",
+  "scrapingbeeLoad",
+];
+
+const testEnginesScreenshot: (Engine | undefined)[] = [
+  undefined,
+  "fire-engine;chrome-cdp",
+  "fire-engine;playwright",
+  "scrapingbee",
+  "scrapingbeeLoad",
+];
+
 describe("Standalone scrapeURL tests", () => {
-  it("Basic scrape", async () => {
-    const out = await scrapeURL("test:scrape-basic", "https://www.roastmywebsite.ai/", scrapeOptions.parse({}));
+  describe.each(testEngines)("Engine %s", (forceEngine: Engine | undefined) => {
+    it("Basic scrape", async () => {
+      const out = await scrapeURL("test:scrape-basic", "https://www.roastmywebsite.ai/", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).not.toHaveProperty("content");
         expect(out.document).toHaveProperty("markdown");
         expect(out.document).toHaveProperty("metadata");
         expect(out.document).not.toHaveProperty("html");
         expect(out.document.markdown).toContain("_Roast_");
         expect(out.document.metadata.error).toBeUndefined();
         expect(out.document.metadata.title).toBe("Roast My Website");
         expect(out.document.metadata.description).toBe(
           "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"
         );
         expect(out.document.metadata.keywords).toBe(
           "Roast My Website,Roast,Website,GitHub,Firecrawl"
         );
         expect(out.document.metadata.robots).toBe("follow, index");
         expect(out.document.metadata.ogTitle).toBe("Roast My Website");
         expect(out.document.metadata.ogDescription).toBe(
           "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"
         );
         expect(out.document.metadata.ogUrl).toBe(
           "https://www.roastmywebsite.ai"
         );
         expect(out.document.metadata.ogImage).toBe(
           "https://www.roastmywebsite.ai/og.png"
         );
         expect(out.document.metadata.ogLocaleAlternate).toStrictEqual([]);
         expect(out.document.metadata.ogSiteName).toBe("Roast My Website");
         expect(out.document.metadata.sourceURL).toBe(
           "https://www.roastmywebsite.ai/"
         );
         expect(out.document.metadata.statusCode).toBe(200);
       }
 
     }, 30000);
 
     it("Scrape with formats markdown and html", async () => {
       const out = await scrapeURL("test:scrape-formats-markdown-html", "https://roastmywebsite.ai", scrapeOptions.parse({
         formats: ["markdown", "html"],
-      }));
+      }), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty("markdown");
         expect(out.document).toHaveProperty("html");
         expect(out.document).toHaveProperty("metadata");
         expect(out.document.markdown).toContain("_Roast_");
         expect(out.document.html).toContain("<h1");
         expect(out.document.metadata.statusCode).toBe(200);
         expect(out.document.metadata.error).toBeUndefined();
       }
 
     }, 30000);
 
     it("Scrape with onlyMainContent disabled", async () => {
       const out = await scrapeURL("test:scrape-onlyMainContent-false", "https://www.scrapethissite.com/", scrapeOptions.parse({
         onlyMainContent: false,
-      }));
+      }), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty("markdown");
         expect(out.document).toHaveProperty("metadata");
         expect(out.document).not.toHaveProperty("html");
         expect(out.document.markdown).toContain("[FAQ](/faq/)"); // .nav
         expect(out.document.markdown).toContain("Hartley Brody 2023"); // #footer
       }
     }, 30000);
 
     it("Scrape with excludeTags", async () => {
       const out = await scrapeURL("test:scrape-excludeTags", "https://www.scrapethissite.com/", scrapeOptions.parse({
         onlyMainContent: false,
         excludeTags: ['.nav', '#footer', 'strong'],
-      }));
+      }), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty("markdown");
         expect(out.document).toHaveProperty("metadata");
         expect(out.document).not.toHaveProperty("html");
         expect(out.document.markdown).not.toContain("Hartley Brody 2023");
         expect(out.document.markdown).not.toContain("[FAQ](/faq/)");
       }
     }, 30000);
 
     it("Scrape of a page with 400 status code", async () => {
-      const out = await scrapeURL("test:scrape-400", "https://httpstat.us/400", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-400", "https://httpstat.us/400", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(400);
       }
     }, 30000);
 
     it("Scrape of a page with 401 status code", async () => {
-      const out = await scrapeURL("test:scrape-401", "https://httpstat.us/401", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-401", "https://httpstat.us/401", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(401);
       }
     }, 30000);
 
     it("Scrape of a page with 403 status code", async () => {
-      const out = await scrapeURL("test:scrape-403", "https://httpstat.us/403", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-403", "https://httpstat.us/403", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(403);
       }
     }, 30000);
 
     it("Scrape of a page with 404 status code", async () => {
-      const out = await scrapeURL("test:scrape-404", "https://httpstat.us/404", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-404", "https://httpstat.us/404", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(404);
       }
     }, 30000);
 
     it("Scrape of a page with 405 status code", async () => {
-      const out = await scrapeURL("test:scrape-405", "https://httpstat.us/405", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-405", "https://httpstat.us/405", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(405);
       }
     }, 30000);
 
     it("Scrape of a page with 500 status code", async () => {
-      const out = await scrapeURL("test:scrape-500", "https://httpstat.us/500", scrapeOptions.parse({}));
+      const out = await scrapeURL("test:scrape-500", "https://httpstat.us/500", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('markdown');
         expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(500);
       }
     }, 30000);
 
-    it("Scrape with screenshot", async () => {
-      const out = await scrapeURL("test:scrape-screenshot", "https://www.scrapethissite.com/", scrapeOptions.parse({
-        formats: ["screenshot"],
-      }));
+    it("Scrape a redirected page", async () => {
+      const out = await scrapeURL("test:scrape-redirect", "https://scrapethissite.com/", scrapeOptions.parse({}), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
-        expect(out.document).toHaveProperty('screenshot');
-        expect(typeof out.document.screenshot).toBe("string");
+        expect(out.document).toHaveProperty('markdown');
+        expect(out.document.markdown).toContain("Explore Sandbox");
         expect(out.document).toHaveProperty('metadata');
-        expect(out.document.metadata.statusCode).toBe(200);
-        expect(out.document.metadata.error).toBeUndefined();
-      }
-    }, 30000);
+        expect(out.document.metadata.sourceURL).toBe("https://scrapethissite.com/");
+        expect(out.document.metadata.url).toBe("https://www.scrapethissite.com/");
+        expect(out.document.metadata.statusCode).toBe(200);
+        expect(out.document.metadata.error).toBeUndefined();
+      }
+    }, 30000);
+  });
 
-  it("Scrape with full-page screenshot", async () => {
-    const out = await scrapeURL("test:scrape-screenshot-fullPage", "https://www.scrapethissite.com/", scrapeOptions.parse({
-      formats: ["screenshot@fullPage"],
-    }));
+  describe.each(testEnginesScreenshot)("Screenshot on engine %s", (forceEngine: Engine | undefined) => {
+    it("Scrape with screenshot", async () => {
+      const out = await scrapeURL("test:scrape-screenshot", "https://www.scrapethissite.com/", scrapeOptions.parse({
+        formats: ["screenshot"],
+      }), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
         expect(out.document).toHaveProperty('screenshot');
         expect(typeof out.document.screenshot).toBe("string");
-        expect(out.document).toHaveProperty('metadata');
-        expect(out.document.metadata.statusCode).toBe(200);
-        expect(out.document.metadata.error).toBeUndefined();
-      }
-    }, 30000);
+        expect(out.document.screenshot!.startsWith("https://service.firecrawl.dev/storage/v1/object/public/media/"));
+        // TODO: attempt to fetch screenshot
+        expect(out.document).toHaveProperty('metadata');
+        expect(out.document.metadata.statusCode).toBe(200);
+        expect(out.document.metadata.error).toBeUndefined();
+      }
+    }, 30000);
 
-    it("Scrape a redirected page", async () => {
-      const out = await scrapeURL("test:scrape-screenshot-fullPage", "https://scrapethissite.com/", scrapeOptions.parse({}));
+    it("Scrape with full-page screenshot", async () => {
+      const out = await scrapeURL("test:scrape-screenshot-fullPage", "https://www.scrapethissite.com/", scrapeOptions.parse({
+        formats: ["screenshot@fullPage"],
+      }), { forceEngine });
 
       expect(out.logs.length).toBeGreaterThan(0);
       expect(out.success).toBe(true);
       if (out.success) {
         expect(out.document.warning).toBeUndefined();
-        expect(out.document).toHaveProperty('markdown');
-        expect(out.document.markdown).toContain("Explore Sandbox");
-        expect(out.document).toHaveProperty('metadata');
-        expect(out.document.metadata.sourceURL).toBe("https://scrapethissite.com/");
-        expect(out.document.metadata.url).toBe("https://www.scrapethissite.com/");
+        expect(out.document).toHaveProperty('screenshot');
+        expect(typeof out.document.screenshot).toBe("string");
+        expect(out.document.screenshot!.startsWith("https://service.firecrawl.dev/storage/v1/object/public/media/"));
+        // TODO: attempt to fetch screenshot
+        expect(out.document).toHaveProperty('metadata');
         expect(out.document.metadata.statusCode).toBe(200);
         expect(out.document.metadata.error).toBeUndefined();
       }
     }, 30000);
+  });
 
   it("Scrape of a PDF file", async () => {
     const out = await scrapeURL("test:scrape-pdf", "https://arxiv.org/pdf/astro-ph/9301001.pdf", scrapeOptions.parse({}));
@@ -320,7 +346,8 @@ describe("Standalone scrapeURL tests", () => {
     }
   }, 120000)
 
-  test.concurrent.each(new Array(100).fill(0).map((_, i) => "https://www.scrapethissite.com/?i=" + i))("Concurrent scrapes", async (url) => {
+  test.concurrent.each(new Array(100).fill(0).map((_, i) => i))("Concurrent scrape #%i", async (i) => {
+    const url = "https://www.scrapethissite.com/?i=" + i;
     const id = "test:concurrent:" + url;
     const out = await scrapeURL(id, url, scrapeOptions.parse({}));
 
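For context on the pattern this commit introduces: Jest's describe.each generates one copy of the enclosed suite per table entry, so wrapping the tests in describe.each(testEngines) runs every scrape once per engine, plus once with undefined to exercise the default engine-selection path, with the entry threaded through to scrapeURL as { forceEngine }. Below is a minimal standalone sketch of that pattern; MyEngine and scrapeStub are hypothetical stand-ins for illustration, not Firecrawl exports.

// Sketch of the describe.each parameterization used in the diff above.
// "MyEngine" and "scrapeStub" are hypothetical stand-ins, not Firecrawl code.
type MyEngine = "chrome-cdp" | "playwright" | "tlsclient";

const engines: (MyEngine | undefined)[] = [undefined, "chrome-cdp", "playwright", "tlsclient"];

// Stub scraper: undefined means "let the scraper pick an engine on its own".
async function scrapeStub(url: string, opts: { forceEngine?: MyEngine }): Promise<{ success: boolean }> {
  return { success: true };
}

// One suite per table entry; %s interpolates the engine name into the title.
describe.each(engines)("Engine %s", (forceEngine: MyEngine | undefined) => {
  it("scrapes", async () => {
    const out = await scrapeStub("https://example.com", { forceEngine });
    expect(out.success).toBe(true);
  }, 30000);
});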