Merge branch 'main' into test/load-testing

rafaelsideguide 2024-05-22 11:26:19 -03:00
commit 04a0bef0fb
14 changed files with 1651 additions and 80 deletions

View File

@@ -25,6 +25,9 @@ env:
SUPABASE_SERVICE_TOKEN: ${{ secrets.SUPABASE_SERVICE_TOKEN }}
SUPABASE_URL: ${{ secrets.SUPABASE_URL }}
TEST_API_KEY: ${{ secrets.TEST_API_KEY }}
HYPERDX_API_KEY: ${{ secrets.HYPERDX_API_KEY }}
HDX_NODE_BETA_MODE: 1
jobs:
pre-deploy:

View File

@@ -31,3 +31,8 @@ POSTHOG_HOST= # set if you'd like to send posthog events like job logs
STRIPE_PRICE_ID_STANDARD=
STRIPE_PRICE_ID_SCALE=
HYPERDX_API_KEY=
HDX_NODE_BETA_MODE=1
FIRE_ENGINE_BETA_URL= # set if you'd like to use the fire engine closed beta

View File

@@ -49,6 +49,7 @@
"@bull-board/express": "^5.8.0",
"@devil7softwares/pos": "^1.0.2",
"@dqbd/tiktoken": "^1.0.13",
"@hyperdx/node-opentelemetry": "^0.7.0",
"@logtail/node": "^0.4.12",
"@nangohq/node": "^0.36.33",
"@sentry/node": "^7.48.0",

File diff suppressed because it is too large

View File

@@ -81,7 +81,7 @@ describe("E2E Tests for API Routes", () => {
expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("metadata");
expect(response.body.data).not.toHaveProperty("html");
expect(response.body.data.content).toContain("🔥 FireCrawl");
expect(response.body.data.content).toContain("🔥 Firecrawl");
}, 30000); // 30 seconds timeout
it("should return a successful response with a valid API key and includeHtml set to true", async () => {
@@ -99,8 +99,8 @@ describe("E2E Tests for API Routes", () => {
expect(response.body.data).toHaveProperty("markdown");
expect(response.body.data).toHaveProperty("html");
expect(response.body.data).toHaveProperty("metadata");
expect(response.body.data.content).toContain("🔥 FireCrawl");
expect(response.body.data.markdown).toContain("🔥 FireCrawl");
expect(response.body.data.content).toContain("🔥 Firecrawl");
expect(response.body.data.markdown).toContain("🔥 Firecrawl");
expect(response.body.data.html).toContain("<h1");
}, 30000); // 30 seconds timeout
@@ -440,8 +440,8 @@ describe("E2E Tests for API Routes", () => {
// 120 seconds
expect(completedResponse.body.data[0]).toHaveProperty("html");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
expect(completedResponse.body.data[0].markdown).toContain("FireCrawl");
expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
expect(completedResponse.body.data[0].markdown).toContain("Firecrawl");
expect(completedResponse.body.data[0].html).toContain("<h1");
}, 60000);
});
@@ -576,7 +576,7 @@ describe("E2E Tests for API Routes", () => {
expect(completedResponse.body.data[0]).toHaveProperty("content");
expect(completedResponse.body.data[0]).toHaveProperty("markdown");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
}, 60000); // 60 seconds
it('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension', async () => {
@@ -697,8 +697,8 @@ describe("E2E Tests for API Routes", () => {
// 120 seconds
expect(completedResponse.body.data[0]).toHaveProperty("html");
expect(completedResponse.body.data[0]).toHaveProperty("metadata");
expect(completedResponse.body.data[0].content).toContain("🔥 FireCrawl");
expect(completedResponse.body.data[0].markdown).toContain("FireCrawl");
expect(completedResponse.body.data[0].content).toContain("🔥 Firecrawl");
expect(completedResponse.body.data[0].markdown).toContain("Firecrawl");
expect(completedResponse.body.data[0].html).toContain("<h1");
}, 60000);
}); // 60 seconds

View File

@@ -4,11 +4,22 @@ import { AuthResponse, RateLimiterMode } from "../../src/types";
import { supabase_service } from "../../src/services/supabase";
import { withAuth } from "../../src/lib/withAuth";
import { RateLimiterRedis } from "rate-limiter-flexible";
import { setTraceAttributes } from '@hyperdx/node-opentelemetry';
export async function authenticateUser(req, res, mode?: RateLimiterMode) : Promise<AuthResponse> {
return withAuth(supaAuthenticateUser)(req, res, mode);
}
function setTrace(team_id: string, api_key: string) {
try {
setTraceAttributes({
team_id,
api_key
});
} catch (error) {
console.error('Error setting trace attributes:', error);
}
}
export async function supaAuthenticateUser(
req,
res,
@@ -78,11 +89,13 @@ export async function supaAuthenticateUser(
status: 401,
};
}
const team_id = data[0].team_id;
const plan = getPlanByPriceId(data[0].price_id);
// HyperDX Logging
setTrace(team_id, normalizedApi);
subscriptionData = {
team_id: data[0].team_id,
plan: getPlanByPriceId(data[0].price_id)
team_id: team_id,
plan: plan
}
switch (mode) {
case RateLimiterMode.Crawl:

View File

@@ -5,6 +5,8 @@ import "dotenv/config";
import { getWebScraperQueue } from "./services/queue-service";
import { redisClient } from "./services/rate-limiter";
import { v0Router } from "./routes/v0";
import { initSDK } from '@hyperdx/node-opentelemetry';
const { createBullBoard } = require("@bull-board/api");
const { BullAdapter } = require("@bull-board/api/bullAdapter");
const { ExpressAdapter } = require("@bull-board/express");
@@ -47,6 +49,11 @@ const DEFAULT_PORT = process.env.PORT ?? 3002;
const HOST = process.env.HOST ?? "localhost";
redisClient.connect();
// HyperDX OpenTelemetry
if(process.env.ENV === 'production') {
initSDK({ consoleCapture: true, additionalInstrumentations: []});
}
export function startServer(port = DEFAULT_PORT) {
const server = app.listen(Number(port), HOST, () => {

View File

@@ -0,0 +1,42 @@
import { scrapWithFireEngine } from "../../src/scraper/WebScraper/single_url";
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
const scrapInBatches = async (
urls: string[],
batchSize: number,
delayMs: number
) => {
let successCount = 0;
let errorCount = 0;
for (let i = 0; i < urls.length; i += batchSize) {
const batch = urls
.slice(i, i + batchSize)
.map((url) => scrapWithFireEngine(url));
try {
const results = await Promise.all(batch);
results.forEach((data, index) => {
if (data.trim() === "") {
errorCount++;
} else {
successCount++;
console.log(
`Scraping result ${i + index + 1}:`,
data.trim().substring(0, 20) + "..."
);
}
});
} catch (error) {
console.error("Error during scraping:", error);
}
await delay(delayMs);
}
console.log(`Total successful scrapes: ${successCount}`);
console.log(`Total errored scrapes: ${errorCount}`);
};
function run() {
const urls = Array.from({ length: 200 }, () => "https://scrapethissite.com");
scrapInBatches(urls, 10, 1000);
}
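
The hunk above defines run() but its invocation falls outside the visible lines of the 42-line file. A minimal sketch of how the script is presumably kicked off at the bottom of the file, assuming it is executed directly (e.g. via ts-node); the call itself is an assumption, not part of the visible diff:

// Hypothetical entry point: start the load test when the file is executed directly,
// scraping 200 URLs in batches of 10 with a 1000 ms pause between batches
// (per the arguments passed above).
run();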

View File

@@ -117,7 +117,7 @@ export class WebCrawler {
const response = await axios.get(this.robotsTxtUrl);
this.robots = robotsParser(this.robotsTxtUrl, response.data);
} catch (error) {
console.error(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`);
console.log(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`);
}

View File

@@ -10,6 +10,15 @@ import { fetchAndProcessPdf } from "./utils/pdfProcessor";
dotenv.config();
const baseScrapers = [
"fire-engine",
"scrapingBee",
"playwright",
"scrapingBeeLoad",
"fetch",
] as const;
export async function generateRequestParams(
url: string,
wait_browser: string = "domcontentloaded",
@@ -33,15 +42,39 @@ export async function generateRequestParams(
return defaultParams;
}
}
export async function scrapWithCustomFirecrawl(
export async function scrapWithFireEngine(
url: string,
options?: any
): Promise<string> {
try {
// TODO: merge the custom firecrawl scraper into mono-repo when ready
return null;
const reqParams = await generateRequestParams(url);
const wait_playwright = reqParams["params"]?.wait ?? 0;
const response = await fetch(process.env.FIRE_ENGINE_BETA_URL+ "/scrape", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ url: url, wait: wait_playwright }),
});
if (!response.ok) {
console.error(
`[Fire-Engine] Error fetching url: ${url} with status: ${response.status}`
);
return "";
}
const contentType = response.headers['content-type'];
if (contentType && contentType.includes('application/pdf')) {
return fetchAndProcessPdf(url);
} else {
const data = await response.json();
const html = data.content;
return html ?? "";
}
} catch (error) {
console.error(`Error scraping with custom firecrawl-scraper: ${error}`);
console.error(`[Fire-Engine][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
@@ -63,7 +96,7 @@ export async function scrapWithScrapingBee(
if (response.status !== 200 && response.status !== 404) {
console.error(
`Scraping bee error in ${url} with status code ${response.status}`
`[ScrapingBee] Error fetching url: ${url} with status code ${response.status}`
);
return "";
}
@@ -77,7 +110,7 @@
return text;
}
} catch (error) {
console.error(`Error scraping with Scraping Bee: ${error}`);
console.error(`[ScrapingBee][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
@@ -97,7 +130,7 @@ export async function scrapWithPlaywright(url: string): Promise<string> {
if (!response.ok) {
console.error(
`Error fetching w/ playwright server -> URL: ${url} with status: ${response.status}`
`[Playwright] Error fetching url: ${url} with status: ${response.status}`
);
return "";
}
@@ -111,11 +144,62 @@ export async function scrapWithPlaywright(url: string): Promise<string> {
return html ?? "";
}
} catch (error) {
console.error(`Error scraping with Puppeteer: ${error}`);
console.error(`[Playwright][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
export async function scrapWithFetch(url: string): Promise<string> {
try {
const response = await fetch(url);
if (!response.ok) {
console.error(
`[Fetch] Error fetching url: ${url} with status: ${response.status}`
);
return "";
}
const contentType = response.headers['content-type'];
if (contentType && contentType.includes('application/pdf')) {
return fetchAndProcessPdf(url);
} else {
const text = await response.text();
return text;
}
} catch (error) {
console.error(`[Fetch][c] Error fetching url: ${url} -> ${error}`);
return "";
}
}
/**
* Get the order of scrapers to be used for scraping a URL
* If the user doesn't have envs set for a specific scraper, it will be removed from the order.
* @param defaultScraper The URL-specific preferred scraper (from urlSpecificParams), if any; it is placed first in the returned order
* @returns The order of scrapers to be used for scraping a URL
*/
function getScrapingFallbackOrder(defaultScraper?: string) {
const availableScrapers = baseScrapers.filter(scraper => {
switch (scraper) {
case "scrapingBee":
case "scrapingBeeLoad":
return !!process.env.SCRAPING_BEE_API_KEY;
case "fire-engine":
return !!process.env.FIRE_ENGINE_BETA_URL;
case "playwright":
return !!process.env.PLAYWRIGHT_MICROSERVICE_URL;
default:
return true;
}
});
const defaultOrder = ["scrapingBee", "fire-engine", "playwright", "scrapingBeeLoad", "fetch"];
const filteredDefaultOrder = defaultOrder.filter((scraper: typeof baseScrapers[number]) => availableScrapers.includes(scraper));
const uniqueScrapers = new Set(defaultScraper ? [defaultScraper, ...filteredDefaultOrder, ...availableScrapers] : [...filteredDefaultOrder, ...availableScrapers]);
const scrapersInOrder = Array.from(uniqueScrapers);
return scrapersInOrder as typeof baseScrapers[number][];
}
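
To make the fallback logic concrete, a hedged worked example of what getScrapingFallbackOrder returns under one assumed configuration (the env setup below is hypothetical; the result follows from the filter and Set construction above):

// Assume only ScrapingBee and Playwright are configured:
//   SCRAPING_BEE_API_KEY        set
//   PLAYWRIGHT_MICROSERVICE_URL set
//   FIRE_ENGINE_BETA_URL        unset
// and urlSpecificParams names "fire-engine" as the defaultScraper for the URL.
//
// getScrapingFallbackOrder("fire-engine")
// => ["fire-engine", "scrapingBee", "playwright", "scrapingBeeLoad", "fetch"]
//
// The defaultScraper is always placed first even when its env var is missing;
// attemptScraping below then skips it at runtime because the
// process.env.FIRE_ENGINE_BETA_URL guard fails, so the loop falls back to the
// next scraper in the order.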
export async function scrapSingleUrl(
urlToScrap: string,
pageOptions: PageOptions = { onlyMainContent: true, includeHtml: false },
@@ -137,17 +221,14 @@ export async function scrapSingleUrl(
const attemptScraping = async (
url: string,
method:
| "firecrawl-scraper"
| "scrapingBee"
| "playwright"
| "scrapingBeeLoad"
| "fetch"
method: typeof baseScrapers[number]
) => {
let text = "";
switch (method) {
case "firecrawl-scraper":
text = await scrapWithCustomFirecrawl(url);
case "fire-engine":
if (process.env.FIRE_ENGINE_BETA_URL) {
text = await scrapWithFireEngine(url);
}
break;
case "scrapingBee":
if (process.env.SCRAPING_BEE_API_KEY) {
@@ -169,25 +250,7 @@ export async function scrapSingleUrl(
}
break;
case "fetch":
try {
const response = await fetch(url);
if (!response.ok) {
console.error(
`Error fetching URL: ${url} with status: ${response.status}`
);
return "";
}
const contentType = response.headers['content-type'];
if (contentType && contentType.includes('application/pdf')) {
return fetchAndProcessPdf(url);
} else {
text = await response.text();
}
} catch (error) {
console.error(`Error scraping URL: ${error}`);
return "";
}
text = await scrapWithFetch(url);
break;
}
@@ -205,15 +268,7 @@ export async function scrapSingleUrl(
console.error(`Invalid URL key, trying: ${urlToScrap}`);
}
const defaultScraper = urlSpecificParams[urlKey]?.defaultScraper ?? "";
const scrapersInOrder = defaultScraper
? [
defaultScraper,
"scrapingBee",
"playwright",
"scrapingBeeLoad",
"fetch",
]
: ["scrapingBee", "playwright", "scrapingBeeLoad", "fetch"];
const scrapersInOrder = getScrapingFallbackOrder(defaultScraper)
for (const scraper of scrapersInOrder) {
// If text already exists from the crawler, use it
@@ -225,7 +280,10 @@ export async function scrapSingleUrl(
}
[text, html] = await attemptScraping(urlToScrap, scraper);
if (text && text.trim().length >= 100) break;
console.log(`Falling back to ${scraper}`);
const nextScraperIndex = scrapersInOrder.indexOf(scraper) + 1;
if (nextScraperIndex < scrapersInOrder.length) {
console.info(`Falling back to ${scrapersInOrder[nextScraperIndex]}`);
}
}
if (!text) {

View File

@@ -63,7 +63,7 @@ export const urlSpecificParams = {
},
},
"ycombinator.com":{
defaultScraper: "playwright",
defaultScraper: "fire-engine",
params: {
wait_browser: "networkidle2",
block_resources: false,

View File

@@ -227,10 +227,11 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) {
if (creditUsages && creditUsages.length > 0) {
totalCreditsUsed = creditUsages[0].total_credits_used;
console.log("Total Credits Used:", totalCreditsUsed);
// console.log("Total Credits Used:", totalCreditsUsed);
}
} catch (error) {
console.error("Error calculating credit usage:", error);
}
// Adjust total credits used by subtracting coupon value
const adjustedCreditsUsed = Math.max(0, totalCreditsUsed - couponCredits);

View File

@@ -5,6 +5,11 @@ import { logtail } from "./logtail";
import { startWebScraperPipeline } from "../main/runWebScraper";
import { callWebhook } from "./webhook";
import { logJob } from "./logging/log_job";
import { initSDK } from '@hyperdx/node-opentelemetry';
if(process.env.ENV === 'production') {
initSDK({ consoleCapture: true, additionalInstrumentations: []});
}
getWebScraperQueue().process(
Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 8)),

View File

@@ -54,7 +54,7 @@ export const testSuiteRateLimiter = new RateLimiterRedis({
export function getRateLimiter(mode: RateLimiterMode, token: string, plan?: string){
// Special test suite case. TODO: Change this later.
if (token.includes("57017")){
if (token.includes("57017") || token.includes("6254cf9")){
return testSuiteRateLimiter;
}
switch (mode) {