diff --git a/.github/archive/publish-rust-sdk.yml b/.github/archive/publish-rust-sdk.yml new file mode 100644 index 00000000..9856bd77 --- /dev/null +++ b/.github/archive/publish-rust-sdk.yml @@ -0,0 +1,42 @@ +name: Publish Rust SDK + +on: [] + +env: + CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} + +jobs: + build-and-publish: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + default: true + profile: minimal + + - name: Install dependencies + run: cargo build --release + + - name: Run version check script + id: version_check_script + run: | + VERSION_INCREMENTED=$(cargo search --limit 1 my_crate_name | grep my_crate_name) + echo "VERSION_INCREMENTED=$VERSION_INCREMENTED" >> $GITHUB_ENV + + - name: Build the package + if: ${{ env.VERSION_INCREMENTED == 'true' }} + run: cargo package + working-directory: ./apps/rust-sdk + + - name: Publish to crates.io + if: ${{ env.VERSION_INCREMENTED == 'true' }} + env: + CARGO_REG_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} + run: cargo publish + working-directory: ./apps/rust-sdk \ No newline at end of file diff --git a/.github/archive/rust-sdk.yml b/.github/archive/rust-sdk.yml new file mode 100644 index 00000000..62deeaab --- /dev/null +++ b/.github/archive/rust-sdk.yml @@ -0,0 +1,61 @@ +name: Run Rust SDK E2E Tests + +on: [] + +env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + BULL_AUTH_KEY: ${{ secrets.BULL_AUTH_KEY }} + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + HOST: ${{ secrets.HOST }} + LLAMAPARSE_API_KEY: ${{ secrets.LLAMAPARSE_API_KEY }} + LOGTAIL_KEY: ${{ secrets.LOGTAIL_KEY }} + POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }} + POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} + NUM_WORKERS_PER_QUEUE: ${{ secrets.NUM_WORKERS_PER_QUEUE }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + PLAYWRIGHT_MICROSERVICE_URL: ${{ secrets.PLAYWRIGHT_MICROSERVICE_URL }} + PORT: ${{ secrets.PORT }} + REDIS_URL: ${{ secrets.REDIS_URL }} + SCRAPING_BEE_API_KEY: ${{ secrets.SCRAPING_BEE_API_KEY }} + SUPABASE_ANON_TOKEN: ${{ secrets.SUPABASE_ANON_TOKEN }} + SUPABASE_SERVICE_TOKEN: ${{ secrets.SUPABASE_SERVICE_TOKEN }} + SUPABASE_URL: ${{ secrets.SUPABASE_URL }} + TEST_API_KEY: ${{ secrets.TEST_API_KEY }} + HYPERDX_API_KEY: ${{ secrets.HYPERDX_API_KEY }} + HDX_NODE_BETA_MODE: 1 + + +jobs: + build: + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Install pnpm + run: npm install -g pnpm + - name: Install dependencies for API + run: pnpm install + working-directory: ./apps/api + - name: Start the application + run: npm start & + working-directory: ./apps/api + id: start_app + - name: Start workers + run: npm run workers & + working-directory: ./apps/api + id: start_workers + - name: Set up Rust + uses: actions/setup-rust@v1 + with: + rust-version: stable + - name: Try the lib build + working-directory: ./apps/rust-sdk + run: cargo build + - name: Run E2E tests for Rust SDK + run: cargo test --test e2e_with_auth diff --git a/.github/scripts/check_version_has_incremented.py b/.github/scripts/check_version_has_incremented.py index e437c934..6dba065f 100644 --- a/.github/scripts/check_version_has_incremented.py +++ b/.github/scripts/check_version_has_incremented.py @@ -15,6 +15,7 @@ false """ import json +import toml import os import re import sys @@ -53,6 +54,19 @@ def get_npm_version(package_name: str) -> str: version = 
response.json()['version'] return version.strip() +def get_rust_version(file_path: str) -> str: + """Extract version string from Cargo.toml.""" + cargo_toml = toml.load(file_path) + if 'package' in cargo_toml and 'version' in cargo_toml['package']: + return cargo_toml['package']['version'].strip() + raise RuntimeError("Unable to find version string in Cargo.toml.") + +def get_crates_version(package_name: str) -> str: + """Get latest version of Rust package from crates.io.""" + response = requests.get(f"https://crates.io/api/v1/crates/{package_name}") + version = response.json()['crate']['newest_version'] + return version.strip() + def is_version_incremented(local_version: str, published_version: str) -> bool: """Compare local and published versions.""" local_version_parsed: Version = parse_version(local_version) @@ -74,6 +88,12 @@ if __name__ == "__main__": current_version = get_js_version(os.path.join(package_path, 'package.json')) # Get published version from npm published_version = get_npm_version(package_name) + if package_type == "rust": + # Get current version from Cargo.toml + current_version = get_rust_version(os.path.join(package_path, 'Cargo.toml')) + # Get published version from crates.io + published_version = get_crates_version(package_name) + else: raise ValueError("Invalid package type. Use 'python' or 'js'.") diff --git a/.github/scripts/requirements.txt b/.github/scripts/requirements.txt index 0bfc6762..60f8e191 100644 --- a/.github/scripts/requirements.txt +++ b/.github/scripts/requirements.txt @@ -1,2 +1,3 @@ requests -packaging \ No newline at end of file +packaging +toml \ No newline at end of file diff --git a/.github/workflows/fly-direct.yml b/.github/workflows/fly-direct.yml index f846098d..8ec675fa 100644 --- a/.github/workflows/fly-direct.yml +++ b/.github/workflows/fly-direct.yml @@ -1,7 +1,7 @@ name: Fly Deploy Direct on: schedule: - - cron: '0 */6 * * *' + - cron: '0 */2 * * *' env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} @@ -22,16 +22,19 @@ env: SUPABASE_SERVICE_TOKEN: ${{ secrets.SUPABASE_SERVICE_TOKEN }} SUPABASE_URL: ${{ secrets.SUPABASE_URL }} TEST_API_KEY: ${{ secrets.TEST_API_KEY }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} jobs: deploy: name: Deploy app runs-on: ubuntu-latest + timeout-minutes: 15 steps: - uses: actions/checkout@v3 - uses: superfly/flyctl-actions/setup-flyctl@master - - run: flyctl deploy --remote-only -a firecrawl-scraper-js + - run: flyctl deploy --remote-only -a firecrawl-scraper-js --build-secret SENTRY_AUTH_TOKEN=$SENTRY_AUTH_TOKEN working-directory: ./apps/api env: FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} BULL_AUTH_KEY: ${{ secrets.BULL_AUTH_KEY }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} diff --git a/.github/workflows/fly.yml b/.github/workflows/fly.yml index 9e4b85a8..ba4a099e 100644 --- a/.github/workflows/fly.yml +++ b/.github/workflows/fly.yml @@ -26,6 +26,8 @@ env: PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} jobs: pre-deploy-e2e-tests: @@ -131,7 +133,7 @@ jobs: working-directory: ./apps/python-sdk - name: Run E2E tests for Python SDK run: | - pytest firecrawl/__tests__/e2e_withAuth/test.py + pytest firecrawl/__tests__/v1/e2e_withAuth/test.py working-directory: ./apps/python-sdk js-sdk-tests: @@ -168,7 +170,7 @@ jobs: - name: Run E2E tests for JavaScript SDK run: npm run test working-directory: 
./apps/js-sdk/firecrawl - + go-sdk-tests: name: Go SDK Tests needs: pre-deploy-e2e-tests @@ -204,18 +206,54 @@ jobs: run: go test -v ./... -timeout 180s working-directory: ./apps/go-sdk/firecrawl + rust-sdk-tests: + name: Rust SDK Tests + needs: pre-deploy-e2e-tests + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Install pnpm + run: npm install -g pnpm + - name: Install dependencies for API + run: pnpm install + working-directory: ./apps/api + - name: Start the application + run: npm start & + working-directory: ./apps/api + id: start_app + - name: Start workers + run: npm run workers & + working-directory: ./apps/api + id: start_workers + - name: Set up Rust + uses: actions/setup-rust@v1 + with: + rust-version: stable + - name: Try the lib build + working-directory: ./apps/rust-sdk + run: cargo build + - name: Run E2E tests for Rust SDK + run: cargo test --test e2e_with_auth + deploy: name: Deploy app runs-on: ubuntu-latest - needs: [pre-deploy-test-suite, python-sdk-tests, js-sdk-tests] + needs: [pre-deploy-test-suite, python-sdk-tests, js-sdk-tests, rust-sdk-tests] steps: - uses: actions/checkout@v3 - uses: superfly/flyctl-actions/setup-flyctl@master - - run: flyctl deploy --remote-only -a firecrawl-scraper-js + - run: flyctl deploy --remote-only -a firecrawl-scraper-js --build-secret SENTRY_AUTH_TOKEN=$SENTRY_AUTH_TOKEN working-directory: ./apps/api env: FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} BULL_AUTH_KEY: ${{ secrets.BULL_AUTH_KEY }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} build-and-publish-python-sdk: name: Build and publish Python SDK @@ -283,7 +321,7 @@ jobs: - name: Install dependencies for JavaScript SDK run: pnpm install working-directory: ./apps/js-sdk/firecrawl - + - name: Run version check script id: version_check_script run: | @@ -297,4 +335,38 @@ jobs: run: | npm run build-and-publish working-directory: ./apps/js-sdk/firecrawl - \ No newline at end of file + build-and-publish-rust-sdk: + name: Build and publish Rust SDK + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + default: true + profile: minimal + + - name: Install dependencies + run: cargo build --release + + - name: Run version check script + id: version_check_script + run: | + VERSION_INCREMENTED=$(cargo search --limit 1 my_crate_name | grep my_crate_name) + echo "VERSION_INCREMENTED=$VERSION_INCREMENTED" >> $GITHUB_ENV + + - name: Build the package + if: ${{ env.VERSION_INCREMENTED == 'true' }} + run: cargo package + working-directory: ./apps/rust-sdk + + - name: Publish to crates.io + if: ${{ env.VERSION_INCREMENTED == 'true' }} + env: + CARGO_REG_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} + run: cargo publish + working-directory: ./apps/rust-sdk \ No newline at end of file diff --git a/.gitignore b/.gitignore index 91b7ef48..9eb551a9 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,5 @@ apps/test-suite/load-test-results/test-run-report.json apps/playwright-service-ts/node_modules/ apps/playwright-service-ts/package-lock.json +*.pyc +.rdb diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..b42c5d23 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "apps/go-sdk/firecrawl-go"] + path = apps/go-sdk/firecrawl-go + url = https://github.com/mendableai/firecrawl-go +[submodule "apps/go-sdk/firecrawl-go-examples"] + path = 
apps/go-sdk/firecrawl-go-examples + url = https://github.com/mendableai/firecrawl-go-examples diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cece879b..d0145a6b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -44,7 +44,6 @@ BULL_AUTH_KEY= @ LOGTAIL_KEY= # Use if you're configuring basic logging with logtail PLAYWRIGHT_MICROSERVICE_URL= # set if you'd like to run a playwright fallback LLAMAPARSE_API_KEY= #Set if you have a llamaparse key you'd like to use to parse pdfs -SERPER_API_KEY= #Set if you have a serper key you'd like to use as a search api SLACK_WEBHOOK_URL= # set if you'd like to send slack server health status messages POSTHOG_API_KEY= # set if you'd like to send posthog events like job logs POSTHOG_HOST= # set if you'd like to send posthog events like job logs diff --git a/README.md b/README.md index 01324690..89ed0127 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,38 @@ +

+<!-- README header badges (markup not recovered): License, Downloads, GitHub Contributors, Open Source -->
+<!-- Social links: Follow on X, Follow on LinkedIn, Join our Discord -->
+ # 🔥 Firecrawl Crawl and convert any website into LLM-ready markdown or structured data. Built by [Mendable.ai](https://mendable.ai?ref=gfirecrawl) and the Firecrawl community. Includes powerful scraping, crawling and data extraction capabilities. @@ -6,11 +41,13 @@ _This repository is in its early development stages. We are still merging custom ## What is Firecrawl? -[Firecrawl](https://firecrawl.dev?ref=github) is an API service that takes a URL, crawls it, and converts it into clean markdown or structured data. We crawl all accessible subpages and give you clean data for each. No sitemap required. +[Firecrawl](https://firecrawl.dev?ref=github) is an API service that takes a URL, crawls it, and converts it into clean markdown or structured data. We crawl all accessible subpages and give you clean data for each. No sitemap required. Check out our [documentation](https://docs.firecrawl.dev). _Pst. hey, you, join our stargazers :)_ - + + GitHub stars + ## How to use it? @@ -41,18 +78,26 @@ To use the API, you need to sign up on [Firecrawl](https://firecrawl.dev) and ge Used to crawl a URL and all accessible subpages. This submits a crawl job and returns a job ID to check the status of the crawl. ```bash -curl -X POST https://api.firecrawl.dev/v0/crawl \ +curl -X POST https://api.firecrawl.dev/v1/crawl \ -H 'Content-Type: application/json' \ - -H 'Authorization: Bearer YOUR_API_KEY' \ + -H 'Authorization: Bearer fc-YOUR_API_KEY' \ -d '{ - "url": "https://mendable.ai" + "url": "https://docs.firecrawl.dev", + "limit": 100, + "scrapeOptions": { + "formats": ["markdown", "html"] + } }' ``` -Returns a jobId +Returns a crawl job id and the url to check the status of the crawl. ```json -{ "jobId": "1234-5678-9101" } +{ + "success": true, + "id": "123-456-789", + "url": "https://api.firecrawl.dev/v1/crawl/123-456-789" +} ``` ### Check Crawl Job @@ -60,7 +105,7 @@ Returns a jobId Used to check the status of a crawl job and get its result. ```bash -curl -X GET https://api.firecrawl.dev/v0/crawl/status/1234-5678-9101 \ +curl -X GET https://api.firecrawl.dev/v1/crawl/123-456-789 \ -H 'Content-Type: application/json' \ -H 'Authorization: Bearer YOUR_API_KEY' ``` @@ -68,18 +113,20 @@ curl -X GET https://api.firecrawl.dev/v0/crawl/status/1234-5678-9101 \ ```json { "status": "completed", - "current": 22, - "total": 22, + "total": 36, + "creditsUsed": 36, + "expiresAt": "2024-00-00T00:00:00.000Z", "data": [ { - "content": "Raw Content ", - "markdown": "# Markdown Content", - "provider": "web-scraper", + "markdown": "[Firecrawl Docs home page![light logo](https://mintlify.s3-us-west-1.amazonaws.com/firecrawl/logo/light.svg)!...", + "html": "...", "metadata": { - "title": "Mendable | AI for CX and Sales", - "description": "AI for CX and Sales", - "language": null, - "sourceURL": "https://www.mendable.ai/" + "title": "Build a 'Chat with website' using Groq Llama 3 | Firecrawl", + "language": "en", + "sourceURL": "https://docs.firecrawl.dev/learn/rag-llama3", + "description": "Learn how to use Firecrawl, Groq Llama 3, and Langchain to build a 'Chat with your website' bot.", + "ogLocaleAlternate": [], + "statusCode": 200 } } ] @@ -88,14 +135,15 @@ curl -X GET https://api.firecrawl.dev/v0/crawl/status/1234-5678-9101 \ ### Scraping -Used to scrape a URL and get its content. +Used to scrape a URL and get its content in the specified formats. 
```bash -curl -X POST https://api.firecrawl.dev/v0/scrape \ +curl -X POST https://api.firecrawl.dev/v1/scrape \ -H 'Content-Type: application/json' \ -H 'Authorization: Bearer YOUR_API_KEY' \ -d '{ - "url": "https://mendable.ai" + "url": "https://docs.firecrawl.dev", + "formats" : ["markdown", "html"] }' ``` @@ -105,68 +153,95 @@ Response: { "success": true, "data": { - "content": "Raw Content ", - "markdown": "# Markdown Content", - "provider": "web-scraper", + "markdown": "Launch Week I is here! [See our Day 2 Release 🚀](https://www.firecrawl.dev/blog/launch-week-i-day-2-doubled-rate-limits)[💥 Get 2 months free...", + "html": " ": { + "type": "string" + }, + "pageStatusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "pageError": { + "type": "string", + "nullable": true, + "description": "The error message of the page" + } + + } + }, + "llm_extraction": { + "type": "object", + "description": "Displayed when using LLM Extraction. Extracted data from the page following the schema defined.", + "nullable": true + }, + "warning": { + "type": "string", + "nullable": true, + "description": "Can be displayed when using LLM Extraction. Warning message will let you know any issues with the extraction." + } + } + } + } + }, + "CrawlStatusResponseObj": { + "type": "object", + "properties": { + "markdown": { + "type": "string" + }, + "content": { + "type": "string" + }, + "html": { + "type": "string", + "nullable": true, + "description": "HTML version of the content on page if `includeHtml` is true" + }, + "rawHtml": { + "type": "string", + "nullable": true, + "description": "Raw HTML content of the page if `includeRawHtml` is true" + }, + "index": { + "type": "integer", + "description": "The number of the page that was crawled. This is useful for `partial_data` so you know which page the data is from." 
+ }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + }, + " ": { + "type": "string" + }, + "pageStatusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "pageError": { + "type": "string", + "nullable": true, + "description": "The error message of the page" + } + } + } + } + }, + "SearchResponse": { + "type": "object", + "properties": { + "success": { + "type": "boolean" + }, + "data": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": "string" + }, + "markdown": { + "type": "string" + }, + "content": { + "type": "string" + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + } + } + } + } + } + } + } + }, + "CrawlResponse": { + "type": "object", + "properties": { + "jobId": { + "type": "string" + } + } + } + } + }, + "security": [ + { + "bearerAuth": [] + } + ] +} \ No newline at end of file diff --git a/apps/api/openapi.json b/apps/api/openapi.json index fb0c4305..5bd3e3d8 100644 --- a/apps/api/openapi.json +++ b/apps/api/openapi.json @@ -18,8 +18,8 @@ "paths": { "/scrape": { "post": { - "summary": "Scrape a single URL and optionally extract information using an LLM", - "operationId": "scrapeAndExtractFromUrl", + "summary": "Scrape a single URL", + "operationId": "scrape", "tags": ["Scraping"], "security": [ { @@ -38,94 +38,47 @@ "format": "uri", "description": "The URL to scrape" }, - "pageOptions": { - "type": "object", - "properties": { - "headers": { - "type": "object", - "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." - }, - "includeHtml": { - "type": "boolean", - "description": "Include the HTML version of the content on page. Will output a html key in the response.", - "default": false - }, - "includeRawHtml": { - "type": "boolean", - "description": "Include the raw HTML content of the page. Will output a rawHtml key in the response.", - "default": false - }, - "onlyIncludeTags": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" - }, - "onlyMainContent": { - "type": "boolean", - "description": "Only return the main content of the page excluding headers, navs, footers, etc.", - "default": false - }, - "removeTags": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Tags, classes and ids to remove from the page. Use comma separated values. 
Example: 'script, .ad, #footer'" - }, - "replaceAllPathsWithAbsolutePaths": { - "type": "boolean", - "description": "Replace all relative paths with absolute paths for images and links", - "default": false - }, - "screenshot": { - "type": "boolean", - "description": "Include a screenshot of the top of the page that you are scraping.", - "default": false - }, - "fullPageScreenshot": { - "type": "boolean", - "description": "Include a full page screenshot of the page that you are scraping.", - "default": false - }, - "waitFor": { - "type": "integer", - "description": "Wait x amount of milliseconds for the page to load to fetch content", - "default": 0 - } - } + "formats": { + "type": "array", + "items": { + "type": "string", + "enum": ["markdown", "html", "rawHtml", "links", "screenshot", "screenshot@fullPage"] + }, + "description": "Specific formats to return.\n\n - markdown: The page in Markdown format.\n - html: The page's HTML, trimmed to include only meaningful content.\n - rawHtml: The page's original HTML.\n - links: The links on the page.\n - screenshot: A screenshot of the top of the page.\n - screenshot@fullPage: A screenshot of the full page. (overridden by screenshot if present)", + "default": ["markdown"] }, - "extractorOptions": { + "headers": { "type": "object", - "description": "Options for extraction of structured information from the page content. Note: LLM-based extraction is not performed by default and only occurs when explicitly configured. The 'markdown' mode simply returns the scraped markdown and is the default mode for scraping.", - "default": {}, - "properties": { - "mode": { - "type": "string", - "enum": ["markdown", "llm-extraction", "llm-extraction-from-raw-html", "llm-extraction-from-markdown"], - "description": "The extraction mode to use. 'markdown': Returns the scraped markdown content, does not perform LLM extraction. 'llm-extraction': Extracts information from the cleaned and parsed content using LLM. 'llm-extraction-from-raw-html': Extracts information directly from the raw HTML using LLM. 'llm-extraction-from-markdown': Extracts information from the markdown content using LLM." - }, - "extractionPrompt": { - "type": "string", - "description": "A prompt describing what information to extract from the page, applicable for LLM extraction modes." - }, - "extractionSchema": { - "type": "object", - "additionalProperties": true, - "description": "The schema for the data to be extracted, required only for LLM extraction modes.", - "required": [ - "company_mission", - "supports_sso", - "is_open_source" - ] - } - } + "description": "Headers to send with the request. Can be used to send cookies, user-agent, etc." + }, + "includeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Only include tags, classes and ids from the page in the final output. Use comma separated values. Example: 'script, .ad, #footer'" + }, + "excludeTags": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tags, classes and ids to remove from the page. Use comma separated values. 
Example: 'script, .ad, #footer'" + }, + "onlyMainContent": { + "type": "boolean", + "description": "Only return the main content of the page excluding headers, navs, footers, etc.", + "default": true }, "timeout": { "type": "integer", "description": "Timeout in milliseconds for the request", "default": 30000 + }, + "waitFor": { + "type": "integer", + "description": "Wait x amount of milliseconds for the page to load to fetch content", + "default": 0 } }, "required": ["url"] @@ -741,24 +694,42 @@ "success": { "type": "boolean" }, + "warning": { + "type": "string", + "nullable": true, + "description": "Warning message to let you know of any issues." + }, "data": { "type": "object", "properties": { "markdown": { - "type": "string" - }, - "content": { - "type": "string" + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" }, "html": { "type": "string", "nullable": true, - "description": "HTML version of the content on page if `includeHtml` is true" + "description": "HTML version of the content on page if the `html` format was specified" }, "rawHtml": { "type": "string", "nullable": true, - "description": "Raw HTML content of the page if `includeRawHtml` is true" + "description": "Raw HTML content of the page if the `rawHtml` format was specified" + }, + "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullSize` format was specified" }, "metadata": { "type": "object", @@ -780,27 +751,16 @@ " ": { "type": "string" }, - "pageStatusCode": { + "statusCode": { "type": "integer", "description": "The status code of the page" }, - "pageError": { + "error": { "type": "string", "nullable": true, "description": "The error message of the page" } - } - }, - "llm_extraction": { - "type": "object", - "description": "Displayed when using LLM Extraction. Extracted data from the page following the schema defined.", - "nullable": true - }, - "warning": { - "type": "string", - "nullable": true, - "description": "Can be displayed when using LLM Extraction. Warning message will let you know any issues with the extraction." } } } @@ -810,24 +770,33 @@ "type": "object", "properties": { "markdown": { - "type": "string" - }, - "content": { - "type": "string" + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" }, "html": { "type": "string", "nullable": true, - "description": "HTML version of the content on page if `includeHtml` is true" + "description": "HTML version of the content on page if the `html` format was specified" }, "rawHtml": { "type": "string", "nullable": true, - "description": "Raw HTML content of the page if `includeRawHtml` is true" + "description": "Raw HTML content of the page if the `rawHtml` format was specified" }, - "index": { - "type": "integer", - "description": "The number of the page that was crawled. This is useful for `partial_data` so you know which page the data is from." 
+ "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullSize` format was specified" }, "metadata": { "type": "object", @@ -849,11 +818,11 @@ " ": { "type": "string" }, - "pageStatusCode": { + "statusCode": { "type": "integer", "description": "The status code of the page" }, - "pageError": { + "error": { "type": "string", "nullable": true, "description": "The error message of the page" @@ -871,34 +840,63 @@ "data": { "type": "array", "items": { - "type": "object", - "properties": { - "url": { - "type": "string" + "markdown": { + "type": "string", + "nullable": true, + "description": "Markdown content of the page if the `markdown` format was specified (default)" + }, + "html": { + "type": "string", + "nullable": true, + "description": "HTML version of the content on page if the `html` format was specified" + }, + "rawHtml": { + "type": "string", + "nullable": true, + "description": "Raw HTML content of the page if the `rawHtml` format was specified" + }, + "links": { + "type": "array", + "items": { + "type": "string", + "format": "uri" }, - "markdown": { - "type": "string" - }, - "content": { - "type": "string" - }, - "metadata": { - "type": "object", - "properties": { - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "language": { - "type": "string", - "nullable": true - }, - "sourceURL": { - "type": "string", - "format": "uri" - } + "nullable": true, + "description": "Links on the page if the `links` format was specified" + }, + "screenshot": { + "type": "string", + "nullable": true, + "description": "URL of the screenshot of the page if the `screenshot` or `screenshot@fullSize` format was specified" + }, + "metadata": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "language": { + "type": "string", + "nullable": true + }, + "sourceURL": { + "type": "string", + "format": "uri" + }, + " ": { + "type": "string" + }, + "statusCode": { + "type": "integer", + "description": "The status code of the page" + }, + "error": { + "type": "string", + "nullable": true, + "description": "The error message of the page" } } } @@ -909,8 +907,15 @@ "CrawlResponse": { "type": "object", "properties": { - "jobId": { + "success": { + "type": "boolean" + }, + "id": { "type": "string" + }, + "url": { + "type": "string", + "format": "uri" } } } diff --git a/apps/api/package.json b/apps/api/package.json index 72235176..bac13e79 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -9,7 +9,8 @@ "format": "prettier --write \"src/**/*.(js|ts)\"", "flyio": "node dist/src/index.js", "start:dev": "nodemon --exec ts-node src/index.ts", - "build": "tsc", + "build": "tsc && pnpm sentry:sourcemaps", + "build:nosentry": "tsc", "test": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='src/__tests__/e2e_noAuth/*'", "test:local-no-auth": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='src/__tests__/e2e_withAuth/*'", "test:full": "npx jest --detectOpenHandles --forceExit --openHandlesTimeout=120000 --watchAll=false --testPathIgnorePatterns='(src/__tests__/e2e_noAuth|src/__tests__/e2e_withAuth)'", @@ -19,8 +20,9 @@ 
"mongo-docker": "docker run -d -p 2717:27017 -v ./mongo-data:/data/db --name mongodb mongo:latest", "mongo-docker-console": "docker exec -it mongodb mongosh", "run-example": "npx ts-node src/example.ts", - "deploy:fly": "flyctl deploy", - "deploy:fly:staging": "fly deploy -c fly.staging.toml" + "deploy:fly": "flyctl deploy --build-secret SENTRY_AUTH_TOKEN=$(dotenv -p SENTRY_AUTH_TOKEN)", + "deploy:fly:staging": "fly deploy -c fly.staging.toml", + "sentry:sourcemaps": "sentry-cli sourcemaps inject --org caleb-peffer --project firecrawl-scraper-js ./dist && sentry-cli sourcemaps upload --org caleb-peffer --project firecrawl-scraper-js ./dist" }, "author": "", "license": "ISC", @@ -29,7 +31,6 @@ "@jest/globals": "^29.7.0", "@tsconfig/recommended": "^1.0.3", "@types/body-parser": "^1.19.2", - "@types/bull": "^4.10.0", "@types/cors": "^2.8.13", "@types/express": "^4.17.17", "@types/jest": "^29.5.12", @@ -53,17 +54,21 @@ "@bull-board/express": "^5.20.5", "@devil7softwares/pos": "^1.0.2", "@dqbd/tiktoken": "^1.0.13", - "@hyperdx/node-opentelemetry": "^0.8.0", + "@hyperdx/node-opentelemetry": "^0.8.1", "@logtail/node": "^0.4.12", "@nangohq/node": "^0.40.8", - "@sentry/node": "^8.13.0", + "@sentry/cli": "^2.33.1", + "@sentry/node": "^8.26.0", + "@sentry/profiling-node": "^8.26.0", "@supabase/supabase-js": "^2.44.2", + "@types/express-ws": "^3.0.4", + "@types/ws": "^8.5.12", "ajv": "^8.16.0", "async": "^3.2.5", "async-mutex": "^0.5.0", "axios": "^1.3.4", "bottleneck": "^2.19.5", - "bull": "^4.15.0", + "bullmq": "^5.11.0", "cacheable-lookup": "^6.1.0", "cheerio": "^1.0.0-rc.12", "cohere": "^1.1.1", @@ -71,7 +76,9 @@ "cron-parser": "^4.9.0", "date-fns": "^3.6.0", "dotenv": "^16.3.1", + "dotenv-cli": "^7.4.2", "express-rate-limit": "^7.3.1", + "express-ws": "^5.0.2", "form-data": "^4.0.0", "glob": "^10.4.2", "gpt3-tokenizer": "^1.1.5", @@ -87,7 +94,7 @@ "moment": "^2.29.4", "mongoose": "^8.4.4", "natural": "^7.0.7", - "openai": "^4.52.2", + "openai": "^4.57.0", "pdf-parse": "^1.1.1", "pos": "^0.4.2", "posthog-node": "^4.0.1", @@ -99,14 +106,16 @@ "robots-parser": "^3.0.1", "scrapingbee": "^1.7.4", "stripe": "^16.1.0", + "systeminformation": "^5.22.11", "turndown": "^7.1.3", "turndown-plugin-gfm": "^1.0.2", "typesense": "^1.5.4", "unstructured-client": "^0.11.3", "uuid": "^10.0.0", "wordpos": "^2.1.0", + "ws": "^8.18.0", "xml2js": "^0.6.2", - "zod": "^3.23.4", + "zod": "^3.23.8", "zod-to-json-schema": "^3.23.1" }, "nodemonConfig": { @@ -116,4 +125,4 @@ "temp" ] } -} +} \ No newline at end of file diff --git a/apps/api/pnpm-lock.yaml b/apps/api/pnpm-lock.yaml index ad0e577c..2762a84c 100644 --- a/apps/api/pnpm-lock.yaml +++ b/apps/api/pnpm-lock.yaml @@ -27,20 +27,32 @@ importers: specifier: ^1.0.13 version: 1.0.15 '@hyperdx/node-opentelemetry': - specifier: ^0.8.0 - version: 0.8.0 + specifier: ^0.8.1 + version: 0.8.1 '@logtail/node': specifier: ^0.4.12 version: 0.4.21 '@nangohq/node': specifier: ^0.40.8 version: 0.40.8 + '@sentry/cli': + specifier: ^2.33.1 + version: 2.33.1 '@sentry/node': - specifier: ^8.13.0 - version: 8.13.0 + specifier: ^8.26.0 + version: 8.26.0 + '@sentry/profiling-node': + specifier: ^8.26.0 + version: 8.26.0 '@supabase/supabase-js': specifier: ^2.44.2 version: 2.44.2 + '@types/express-ws': + specifier: ^3.0.4 + version: 3.0.4 + '@types/ws': + specifier: ^8.5.12 + version: 8.5.12 ajv: specifier: ^8.16.0 version: 8.16.0 @@ -56,9 +68,9 @@ importers: bottleneck: specifier: ^2.19.5 version: 2.19.5 - bull: - specifier: ^4.15.0 - version: 4.15.0 + bullmq: + specifier: ^5.11.0 + version: 
5.11.0 cacheable-lookup: specifier: ^6.1.0 version: 6.1.0 @@ -80,9 +92,15 @@ importers: dotenv: specifier: ^16.3.1 version: 16.4.5 + dotenv-cli: + specifier: ^7.4.2 + version: 7.4.2 express-rate-limit: specifier: ^7.3.1 version: 7.3.1(express@4.19.2) + express-ws: + specifier: ^5.0.2 + version: 5.0.2(express@4.19.2) form-data: specifier: ^4.0.0 version: 4.0.0 @@ -106,7 +124,7 @@ importers: version: 0.0.28 langchain: specifier: ^0.2.8 - version: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1) + version: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0) languagedetect: specifier: ^2.0.0 version: 2.0.0 @@ -129,8 +147,8 @@ importers: specifier: ^7.0.7 version: 7.0.7(socks@2.8.3) openai: - specifier: ^4.52.2 - version: 4.52.2 + specifier: ^4.57.0 + version: 4.57.0(zod@3.23.8) pdf-parse: specifier: ^1.1.1 version: 1.1.1 @@ -164,6 +182,9 @@ importers: stripe: specifier: ^16.1.0 version: 16.1.0 + systeminformation: + specifier: ^5.22.11 + version: 5.22.11 turndown: specifier: ^7.1.3 version: 7.2.0 @@ -182,11 +203,14 @@ importers: wordpos: specifier: ^2.1.0 version: 2.1.0 + ws: + specifier: ^8.18.0 + version: 8.18.0 xml2js: specifier: ^0.6.2 version: 0.6.2 zod: - specifier: ^3.23.4 + specifier: ^3.23.8 version: 3.23.8 zod-to-json-schema: specifier: ^3.23.1 @@ -204,9 +228,6 @@ importers: '@types/body-parser': specifier: ^1.19.2 version: 1.19.5 - '@types/bull': - specifier: ^4.10.0 - version: 4.10.0 '@types/cors': specifier: ^2.8.13 version: 2.8.17 @@ -499,8 +520,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@hyperdx/node-opentelemetry@0.8.0': - resolution: {integrity: sha512-2z1jQqg2czctHOgo17WETUJOX2BJJ2jqg50R/z4o4ADRCS7Ynp4n3eVMLtsJHypQeDdrInUDE0VtVoXN5b+6hw==} + '@hyperdx/node-opentelemetry@0.8.1': + resolution: {integrity: sha512-wNw0yQf54j/9KXVWeEOu8G6C5FT5EFlrz4dcmscTkwCvo6fQOLRZa/NbGcqugt0LSFMc0/6/Q5RDWVqDpEn0LQ==} hasBin: true '@ioredis/commands@1.2.0': @@ -803,8 +824,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-connect@0.37.0': - resolution: {integrity: sha512-SeQktDIH5rNzjiEiazWiJAIXkmnLOnNV7wwHpahrqE0Ph+Z3heqMfxRtoMtbdJSIYLfcNZYO51AjxZ00IXufdw==} + '@opentelemetry/instrumentation-connect@0.38.0': + resolution: {integrity: sha512-2/nRnx3pjYEmdPIaBwtgtSviTKHWnDZN3R+TkRUnhIVrvBKVcq+I5B2rtd6mr6Fe9cHlZ9Ojcuh7pkNh/xdWWg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -833,8 +854,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-express@0.40.1': - resolution: {integrity: sha512-+RKMvVe2zw3kIXRup9c1jFu3T4d0fs5aKy015TpiMyoCKX1UMu3Z0lfgYtuyiSTANvg5hZnDbWmQmqSPj9VTvg==} + '@opentelemetry/instrumentation-express@0.41.1': + resolution: {integrity: sha512-uRx0V3LPGzjn2bxAnV8eUsDT82vT7NTwI0ezEuPMBOTOsnPpGhWdhcdNdhH80sM4TrWrOfXm9HGEdfWE3TRIww==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -845,8 +866,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-fastify@0.37.0': - resolution: {integrity: 
sha512-WRjwzNZgupSzbEYvo9s+QuHJRqZJjVdNxSEpGBwWK8RKLlHGwGVAu0gcc2gPamJWUJsGqPGvahAPWM18ZkWj6A==} + '@opentelemetry/instrumentation-fastify@0.38.0': + resolution: {integrity: sha512-HBVLpTSYpkQZ87/Df3N0gAw7VzYZV3n28THIBrJWfuqw3Or7UqdhnjeuMIPQ04BKk3aZc0cWn2naSQObbh5vXw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -857,6 +878,12 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-fs@0.14.0': + resolution: {integrity: sha512-pVc8P5AgliC1DphyyBUgsxXlm2XaPH4BpYvt7rAZDMIqUpRk8gs19SioABtKqqxvFzg5jPtgJfJsdxq0Y+maLw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation-generic-pool@0.36.0': resolution: {integrity: sha512-CExAEqJvK8jYxrhN8cl6EaGg57EGJi+qsSKouLC5lndXi68gZLOKbZIMZg4pF0kNfp/D4BFaGmA6Ap7d5WoPTw==} engines: {node: '>=14'} @@ -869,8 +896,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-graphql@0.41.0': - resolution: {integrity: sha512-R/gXeljgIhaRDKquVkKYT5QHPnFouM8ooyePZEP0kqyaVAedtR1V7NfAUJbxfTG5fBQa5wdmLjvu63+tzRXZCA==} + '@opentelemetry/instrumentation-graphql@0.42.0': + resolution: {integrity: sha512-N8SOwoKL9KQSX7z3gOaw5UaTeVQcfDO1c21csVHnmnmGUoqsXbArK2B8VuwPWcv6/BC/i3io+xTo7QGRZ/z28Q==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -887,8 +914,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-hapi@0.39.0': - resolution: {integrity: sha512-ik2nA9Yj2s2ay+aNY+tJsKCsEx6Tsc2g/MK0iWBW5tibwrWKTy1pdVt5sB3kd5Gkimqj23UV5+FH2JFcQLeKug==} + '@opentelemetry/instrumentation-hapi@0.40.0': + resolution: {integrity: sha512-8U/w7Ifumtd2bSN1OLaSwAAFhb9FyqWUki3lMMB0ds+1+HdSxYBe9aspEJEgvxAqOkrQnVniAPTEGf1pGM7SOw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -911,8 +938,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-ioredis@0.41.0': - resolution: {integrity: sha512-rxiLloU8VyeJGm5j2fZS8ShVdB82n7VNP8wTwfUQqDwRfHCnkzGr+buKoxuhGD91gtwJ91RHkjHA1Eg6RqsUTg==} + '@opentelemetry/instrumentation-ioredis@0.42.0': + resolution: {integrity: sha512-P11H168EKvBB9TUSasNDOGJCSkpT44XgoM6d3gRIWAa9ghLpYhl0uRkS8//MqPzcJVHr3h3RmfXIpiYLjyIZTw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -929,8 +956,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-koa@0.41.0': - resolution: {integrity: sha512-mbPnDt7ELvpM2S0vixYUsde7122lgegLOJQxx8iJQbB8YHal/xnTh9v7IfArSVzIDo+E+080hxZyUZD4boOWkw==} + '@opentelemetry/instrumentation-koa@0.42.0': + resolution: {integrity: sha512-H1BEmnMhho8o8HuNRq5zEI4+SIHDIglNB7BPKohZyWG4fWNuR7yM4GTlR01Syq21vODAS7z5omblScJD/eZdKw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -953,8 +980,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mongodb@0.45.0': - resolution: {integrity: sha512-xnZP9+ayeB1JJyNE9cIiwhOJTzNEsRhXVdLgfzmrs48Chhhk026mQdM5CITfyXSCfN73FGAIB8d91+pflJEfWQ==} + '@opentelemetry/instrumentation-mongodb@0.46.0': + resolution: {integrity: sha512-VF/MicZ5UOBiXrqBslzwxhN7TVqzu1/LN/QDpkskqM0Zm0aZ4CVRbUygL8d7lrjLn15x5kGIe8VsSphMfPJzlA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -965,8 +992,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mongoose@0.39.0': - resolution: {integrity: 
sha512-J1r66A7zJklPPhMtrFOO7/Ud2p0Pv5u8+r23Cd1JUH6fYPmftNJVsLp2urAt6PHK4jVqpP/YegN8wzjJ2mZNPQ==} + '@opentelemetry/instrumentation-mongoose@0.40.0': + resolution: {integrity: sha512-niRi5ZUnkgzRhIGMOozTyoZIvJKNJyhijQI4nF4iFSb+FUx2v5fngfR+8XLmdQAO7xmsD8E5vEGdDVYVtKbZew==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -977,8 +1004,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mysql2@0.39.0': - resolution: {integrity: sha512-Iypuq2z6TCfriAXCIZjRq8GTFCKhQv5SpXbmI+e60rYdXw8NHtMH4NXcGF0eKTuoCsC59IYSTUvDQYDKReaszA==} + '@opentelemetry/instrumentation-mysql2@0.40.0': + resolution: {integrity: sha512-0xfS1xcqUmY7WE1uWjlmI67Xg3QsSUlNT+AcXHeA4BDUPwZtWqF4ezIwLgpVZfHOnkAEheqGfNSWd1PIu3Wnfg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -989,8 +1016,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mysql@0.39.0': - resolution: {integrity: sha512-8snHPh83rhrDf31v9Kq0Nf+ts8hdr7NguuszRqZomZBHgE0+UyXZSkXHAAFZoBPPRMGyM68uaFE5hVtFl+wOcA==} + '@opentelemetry/instrumentation-mysql@0.40.0': + resolution: {integrity: sha512-d7ja8yizsOCNMYIJt5PH/fKZXjb/mS48zLROO4BzZTtDfhNCl2UM/9VIomP2qkGIFVouSJrGr/T00EzY7bPtKA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -1001,8 +1028,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-nestjs-core@0.38.0': - resolution: {integrity: sha512-M381Df1dM8aqihZz2yK+ugvMFK5vlHG/835dc67Sx2hH4pQEQYDA2PpFPTgc9AYYOydQaj7ClFQunESimjXDgg==} + '@opentelemetry/instrumentation-nestjs-core@0.39.0': + resolution: {integrity: sha512-mewVhEXdikyvIZoMIUry8eb8l3HUjuQjSjVbmLVTt4NQi35tkpnHQrG9bTRBrl3403LoWZ2njMPJyg4l6HfKvA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -1019,8 +1046,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-pg@0.42.0': - resolution: {integrity: sha512-sjgcM8CswYy8zxHgXv4RAZ09DlYhQ+9TdlourUs63Df/ek5RrB1ZbjznqW7PB6c3TyJJmX6AVtPTjAsROovEjA==} + '@opentelemetry/instrumentation-pg@0.43.0': + resolution: {integrity: sha512-og23KLyoxdnAeFs1UWqzSonuCkePUzCX30keSYigIzJe/6WSYA8rnEI5lobcxPEzg+GcU06J7jzokuEHbjVJNw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -1037,8 +1064,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-redis-4@0.40.0': - resolution: {integrity: sha512-0ieQYJb6yl35kXA75LQUPhHtGjtQU9L85KlWa7d4ohBbk/iQKZ3X3CFl5jC5vNMq/GGPB3+w3IxNvALlHtrp7A==} + '@opentelemetry/instrumentation-redis-4@0.41.0': + resolution: {integrity: sha512-H7IfGTqW2reLXqput4yzAe8YpDC0fmVNal95GHMLOrS89W+qWUKIqxolSh63hJyfmwPSFwXASzj7wpSk8Az+Dg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -1091,8 +1118,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation@0.43.0': - resolution: {integrity: sha512-S1uHE+sxaepgp+t8lvIDuRgyjJWisAb733198kwQTUc9ZtYQ2V2gmyCtR1x21ePGVLoMiX/NWY7WA290hwkjJQ==} + '@opentelemetry/instrumentation@0.46.0': + resolution: {integrity: sha512-a9TijXZZbk0vI5TGLZl+0kxyFfrXHhX6Svtz7Pp2/VBlCSKrazuULEyoJQrOknJyFWNMEmbbJgOciHCCpQcisw==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -1270,8 +1297,8 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@prisma/instrumentation@5.16.0': - resolution: {integrity: 
sha512-MVzNRW2ikWvVNnMIEgQMcwWxpFD+XF2U2h0Qz7MjutRqJxrhWexWV2aSi2OXRaU8UL5wzWw7pnjdKUzYhWauLg==} + '@prisma/instrumentation@5.17.0': + resolution: {integrity: sha512-c1Sle4ji8aasMcYfBBHFM56We4ljfenVtRmS8aY06BllS7SoU6SmJBwG7vil+GHiR0Yrh+t9iBwt4AY0Jr4KNQ==} '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -1344,16 +1371,66 @@ packages: '@selderee/plugin-htmlparser2@0.11.0': resolution: {integrity: sha512-P33hHGdldxGabLFjPPpaTxVolMrzrcegejx+0GxjrIb9Zv48D8yAIA/QTDR2dFl7Uz7urX8aX6+5bCZslr+gWQ==} + '@sentry/cli-darwin@2.33.1': + resolution: {integrity: sha512-+4/VIx/E1L2hChj5nGf5MHyEPHUNHJ/HoG5RY+B+vyEutGily1c1+DM2bum7RbD0xs6wKLIyup5F02guzSzG8A==} + engines: {node: '>=10'} + os: [darwin] + + '@sentry/cli-linux-arm64@2.33.1': + resolution: {integrity: sha512-DbGV56PRKOLsAZJX27Jt2uZ11QfQEMmWB4cIvxkKcFVE+LJP4MVA+MGGRUL6p+Bs1R9ZUuGbpKGtj0JiG6CoXw==} + engines: {node: '>=10'} + cpu: [arm64] + os: [linux, freebsd] + + '@sentry/cli-linux-arm@2.33.1': + resolution: {integrity: sha512-zbxEvQju+tgNvzTOt635le4kS/Fbm2XC2RtYbCTs034Vb8xjrAxLnK0z1bQnStUV8BkeBHtsNVrG+NSQDym2wg==} + engines: {node: '>=10'} + cpu: [arm] + os: [linux, freebsd] + + '@sentry/cli-linux-i686@2.33.1': + resolution: {integrity: sha512-g2LS4oPXkPWOfKWukKzYp4FnXVRRSwBxhuQ9eSw2peeb58ZIObr4YKGOA/8HJRGkooBJIKGaAR2mH2Pk1TKaiA==} + engines: {node: '>=10'} + cpu: [x86, ia32] + os: [linux, freebsd] + + '@sentry/cli-linux-x64@2.33.1': + resolution: {integrity: sha512-IV3dcYV/ZcvO+VGu9U6kuxSdbsV2kzxaBwWUQxtzxJ+cOa7J8Hn1t0koKGtU53JVZNBa06qJWIcqgl4/pCuKIg==} + engines: {node: '>=10'} + cpu: [x64] + os: [linux, freebsd] + + '@sentry/cli-win32-i686@2.33.1': + resolution: {integrity: sha512-F7cJySvkpzIu7fnLKNHYwBzZYYwlhoDbAUnaFX0UZCN+5DNp/5LwTp37a5TWOsmCaHMZT4i9IO4SIsnNw16/zQ==} + engines: {node: '>=10'} + cpu: [x86, ia32] + os: [win32] + + '@sentry/cli-win32-x64@2.33.1': + resolution: {integrity: sha512-8VyRoJqtb2uQ8/bFRKNuACYZt7r+Xx0k2wXRGTyH05lCjAiVIXn7DiS2BxHFty7M1QEWUCMNsb/UC/x/Cu2wuA==} + engines: {node: '>=10'} + cpu: [x64] + os: [win32] + + '@sentry/cli@2.33.1': + resolution: {integrity: sha512-dUlZ4EFh98VFRPJ+f6OW3JEYQ7VvqGNMa0AMcmvk07ePNeK/GicAWmSQE4ZfJTTl80ul6HZw1kY01fGQOQlVRA==} + engines: {node: '>= 10'} + hasBin: true + '@sentry/core@8.13.0': resolution: {integrity: sha512-N9Qg4ZGxZWp8eb2eUUHVVKgjBLtFIjS805nG92s6yJmkvOpKm6mLtcUaT/iDf3Hta6nG+xRkhbE3r+Z4cbXG8w==} engines: {node: '>=14.18'} - '@sentry/node@8.13.0': - resolution: {integrity: sha512-OeZ7K90RhyxfwfreerIi4cszzHrPRRH36STJno2+p3sIGbG5VScOccqXzYEOAqHpByxnti4KQN34BLAT2BFOEA==} + '@sentry/core@8.26.0': + resolution: {integrity: sha512-g/tVmTZD4GNbLFf++hKJfBpcCAtduFEMLnbfa9iT/QEZjlmP+EzY+GsH9bafM5VsNe8DiOUp+kJKWtShzlVdBA==} engines: {node: '>=14.18'} - '@sentry/opentelemetry@8.13.0': - resolution: {integrity: sha512-NYn/HNE/SxFXe8pfnxJknhrrRzYRMHNssCoi5M1CeR5G7F2BGxxVmaGsd8j0WyTCpUS4i97G4vhYtDGxHvWN6w==} + '@sentry/node@8.26.0': + resolution: {integrity: sha512-N9mNLzicnfGgsq6P10ckPdTzEFusjTC7gpqPopwq5eEMF7g798hH8CcE5o6FZ4iAAR3vWliAR/jgccdoMmJMpQ==} + engines: {node: '>=14.18'} + + '@sentry/opentelemetry@8.26.0': + resolution: {integrity: sha512-HBDheM/+ysfIz8R1OH4bBIxdgD7ZbQkKLJAUXkdAbBcfbpK/CTtwcplbauF5wY7Q+GYvwL/ShuDwvXRfW+gFyQ==} engines: {node: '>=14.18'} peerDependencies: '@opentelemetry/api': ^1.9.0 @@ -1362,14 +1439,27 @@ packages: '@opentelemetry/sdk-trace-base': ^1.25.1 '@opentelemetry/semantic-conventions': ^1.25.1 + '@sentry/profiling-node@8.26.0': + resolution: 
{integrity: sha512-yGHFoqSKe5j9fDK9n5ntJxDyZnedwjCm6fAXwIlsLJOUBqn5g7l8V1XgBPlCJLZzOG0fbvGvSo4WyBfDoSD8vQ==} + engines: {node: '>=14.18'} + hasBin: true + '@sentry/types@8.13.0': resolution: {integrity: sha512-r63s/H5gvQnQM9tTGBXz2xErUbxZALh4e2Lg/1aHj4zIvGLBjA2z5qWsh6TEZYbpmgAyGShLDr6+rWeUVf9yBQ==} engines: {node: '>=14.18'} + '@sentry/types@8.26.0': + resolution: {integrity: sha512-zKmh6SWsJh630rpt7a9vP4Cm4m1C2gDTUqUiH565CajCL/4cePpNWYrNwalSqsOSL7B9OrczA1+n6a6XvND+ng==} + engines: {node: '>=14.18'} + '@sentry/utils@8.13.0': resolution: {integrity: sha512-PxV0v9VbGWH9zP37P5w2msLUFDr287nYjoY2XVF+RSolyiTs1CQNI5ZMUO3o4MsSac/dpXxjyrZXQd72t/jRYA==} engines: {node: '>=14.18'} + '@sentry/utils@8.26.0': + resolution: {integrity: sha512-xvlPU9Hd2BlyT+FhWHGNwnxWqdVRk2AHnDtVcW4Ma0Ri5EwS+uy4Jeik5UkSv8C5RVb9VlxFmS8LN3I1MPJsLw==} + engines: {node: '>=14.18'} + '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} @@ -1538,10 +1628,6 @@ packages: '@types/body-parser@1.19.5': resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==} - '@types/bull@4.10.0': - resolution: {integrity: sha512-RkYW8K2H3J76HT6twmHYbzJ0GtLDDotpLP9ah9gtiA7zfF6peBH1l5fEiK0oeIZ3/642M7Jcb9sPmor8Vf4w6g==} - deprecated: This is a stub types definition. bull provides its own type definitions, so you do not need this installed. - '@types/bunyan@1.8.9': resolution: {integrity: sha512-ZqS9JGpBxVOvsawzmVt30sP++gSQMTejCkIAQ3VdadOcRE8izTyW66hufvwLeH+YEGP6Js2AW7Gz+RMyvrEbmw==} @@ -1563,6 +1649,9 @@ packages: '@types/express-serve-static-core@4.19.3': resolution: {integrity: sha512-KOzM7MhcBFlmnlr/fzISFF5vGWVSvN6fTd4T+ExOt08bA/dA5kpSzY52nMsI1KDFmUREpJelPYyuslLRSjjgCg==} + '@types/express-ws@3.0.4': + resolution: {integrity: sha512-Yjj18CaivG5KndgcvzttWe8mPFinPCHJC2wvyQqVzA7hqeufM8EtWMj6mpp5omg3s8XALUexhOu8aXAyi/DyJQ==} + '@types/express@4.17.21': resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} @@ -1665,8 +1754,8 @@ packages: '@types/whatwg-url@11.0.5': resolution: {integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==} - '@types/ws@8.5.10': - resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + '@types/ws@8.5.12': + resolution: {integrity: sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ==} '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -1723,6 +1812,10 @@ packages: afinn-165@1.0.4: resolution: {integrity: sha512-7+Wlx3BImrK0HiG6y3lU4xX7SpBPSSu8T9iguPMlaueRFxjbYwAQrp9lqZUuFikqKbd/en8lVREILvP2J80uJA==} + agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + agent-base@7.1.1: resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} engines: {node: '>= 14'} @@ -1938,9 +2031,8 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bull@4.15.0: - resolution: {integrity: sha512-nOEAfUXwUXtFbRPQP3bWCwpQ/NAerAu2Nym/ucv5C1E+Qh2x6RGdKKsYIfZam4mYncayTynTUN/HLhRgGi2N8w==} - engines: 
{node: '>=12'} + bullmq@5.11.0: + resolution: {integrity: sha512-qVzyWGZqie3VHaYEgRXhId/j8ebfmj6MExEJyUByMsUJA5pVciVle3hKLer5fyMwtQ8lTMP7GwhXV/NZ+HzlRA==} bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} @@ -2293,6 +2385,14 @@ packages: domutils@3.1.0: resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + dotenv-cli@7.4.2: + resolution: {integrity: sha512-SbUj8l61zIbzyhIbg0FwPJq6+wjbzdn9oEtozQpZ6kW2ihCcapKVZj49oCT3oPM+mgQm+itgvUQcG5szxVrZTA==} + hasBin: true + + dotenv-expand@10.0.0: + resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} + engines: {node: '>=12'} + dotenv@16.4.5: resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} @@ -2421,6 +2521,12 @@ packages: peerDependencies: express: 4 || 5 || ^5.0.0-beta.1 + express-ws@5.0.2: + resolution: {integrity: sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==} + engines: {node: '>=4.5.0'} + peerDependencies: + express: ^4.0.0 || ^5.0.0-alpha.1 + express@4.19.2: resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} engines: {node: '>= 0.10.0'} @@ -2562,10 +2668,6 @@ packages: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} - get-port@5.1.1: - resolution: {integrity: sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==} - engines: {node: '>=8'} - get-stream@5.2.0: resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} engines: {node: '>=8'} @@ -2672,6 +2774,10 @@ packages: engines: {node: '>=12'} hasBin: true + https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + https-proxy-agent@7.0.4: resolution: {integrity: sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==} engines: {node: '>= 14'} @@ -2708,15 +2814,15 @@ packages: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} engines: {node: '>=6'} - import-in-the-middle@1.4.2: - resolution: {integrity: sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw==} + import-in-the-middle@1.11.0: + resolution: {integrity: sha512-5DimNQGoe0pLUHbR9qK84iWaWjjbsxiqXnw6Qz64+azRgleqv9k2kTt5fw7QsOpmaGYtuxxursnPPsnTKEx10Q==} + + import-in-the-middle@1.7.1: + resolution: {integrity: sha512-1LrZPDtW+atAxH42S6288qyDFNQ2YCty+2mxEPRtfazH6Z5QwkaBSTS2ods7hnVJioF6rkRfNoA6A/MstpFXLg==} import-in-the-middle@1.7.4: resolution: {integrity: sha512-Lk+qzWmiQuRPPulGQeK5qq0v32k2bHnWrRPFgqyvhw7Kkov5L6MOLOIU3pcWeujc9W4q54Cp3Q2WV16eQkc7Bg==} - import-in-the-middle@1.8.1: - resolution: {integrity: sha512-yhRwoHtiLGvmSozNOALgjRPFI6uYsds60EoMqqnXyyv+JOIW/BrrLejuTGBt+bq0T5tLzOHrN0T7xYTm4Qt/ng==} - import-local@3.1.0: resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} engines: {node: '>=8'} @@ -3536,6 +3642,10 @@ packages: resolution: {integrity: 
sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==} engines: {node: '>= 0.4.0'} + node-abi@3.67.0: + resolution: {integrity: sha512-bLn/fU/ALVBE9wj+p4Y21ZJWYFjUXLXPi/IewyLZkx3ApxKDNBWCKdReeKOtD8dWpOdDCeMyLh6ZewzcLsG2Nw==} + engines: {node: '>=10'} + node-abort-controller@3.1.1: resolution: {integrity: sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==} @@ -3623,9 +3733,14 @@ packages: openai@3.3.0: resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==} - openai@4.52.2: - resolution: {integrity: sha512-mMc0XgFuVSkcm0lRIi8zaw++otC82ZlfkCur1qguXYWPETr/+ZwL9A/vvp3YahX+shpaT6j03dwsmUyLAfmEfg==} + openai@4.57.0: + resolution: {integrity: sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==} hasBin: true + peerDependencies: + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true openapi-types@12.1.3: resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==} @@ -3634,9 +3749,11 @@ packages: resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} hasBin: true - opentelemetry-instrumentation-fetch-node@1.2.0: - resolution: {integrity: sha512-aiSt/4ubOTyb1N5C2ZbGrBvaJOXIZhZvpRPYuUVxQJe27wJZqf/o65iPrqgLcgfeOLaQ8cS2Q+762jrYvniTrA==} + opentelemetry-instrumentation-fetch-node@1.2.3: + resolution: {integrity: sha512-Qb11T7KvoCevMaSeuamcLsAD+pZnavkhDnlVL0kRozfhl42dKG5Q3anUklAFKJZjY3twLR+BnRa6DlwwkIE/+A==} engines: {node: '>18.0.0'} + peerDependencies: + '@opentelemetry/api': ^1.6.0 option@0.2.4: resolution: {integrity: sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A==} @@ -4268,6 +4385,12 @@ packages: resolution: {integrity: sha512-SzRP5LQ6Ts2G5NyAa/jg16s8e3R7rfdFjizy1zeoecYWw+nGL+YA1xZvW/+iJmidBGSdLkuvdwTYEyJEb+EiUw==} engines: {node: '>=0.2.6'} + systeminformation@5.22.11: + resolution: {integrity: sha512-aLws5yi4KCHTb0BVvbodQY5bY8eW4asMRDTxTW46hqw9lGjACX6TlLdJrkdoHYRB0qs+MekqEq1zG7WDnWE8Ug==} + engines: {node: '>=8.0.0'} + os: [darwin, linux, win32, freebsd, openbsd, netbsd, sunos, android] + hasBin: true + tar-fs@3.0.5: resolution: {integrity: sha512-JOgGAmZyMgbqpLwct7ZV8VzkEB6pxXFBVErLtb+XCOqzc6w1xiWKI9GVd6bwk68EX7eJ4DWmfXVmq8K2ziZTGg==} @@ -4460,10 +4583,6 @@ packages: resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} hasBin: true - uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - uuid@9.0.1: resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} hasBin: true @@ -4554,8 +4673,20 @@ packages: resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - ws@8.17.1: - resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: 
+ optional: true + + ws@8.18.0: + resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -4945,7 +5076,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@hyperdx/node-opentelemetry@0.8.0': + '@hyperdx/node-opentelemetry@0.8.1': dependencies: '@hyperdx/instrumentation-exception': 0.1.0(@opentelemetry/api@1.9.0) '@hyperdx/instrumentation-sentry-node': 0.1.0(@opentelemetry/api@1.9.0) @@ -4970,6 +5101,7 @@ snapshots: lodash.isobject: 3.0.2 lodash.isplainobject: 4.0.6 lodash.isstring: 4.0.1 + node-fetch: 2.7.0 open: 8.4.2 ora: 5.4.1 pino-abstract-transport: 1.2.0 @@ -5192,13 +5324,13 @@ snapshots: '@js-sdsl/ordered-map@4.4.2': {} - '@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2)': + '@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8))': dependencies: ansi-styles: 5.2.0 camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.12 - langsmith: 0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + langsmith: 0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) ml-distance: 4.0.1 mustache: 4.2.0 p-queue: 6.6.2 @@ -5210,20 +5342,20 @@ snapshots: - langchain - openai - '@langchain/openai@0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))': + '@langchain/openai@0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))': dependencies: - '@langchain/core': 
0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) js-tiktoken: 1.0.12 - openai: 4.52.2 + openai: 4.57.0(zod@3.23.8) zod: 3.23.8 zod-to-json-schema: 3.23.1(zod@3.23.8) transitivePeerDependencies: - encoding - langchain - '@langchain/textsplitters@0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2)': + '@langchain/textsplitters@0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8))': dependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) js-tiktoken: 1.0.12 transitivePeerDependencies: - langchain @@ -5492,7 +5624,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-connect@0.37.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-connect@0.38.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) @@ -5535,7 +5667,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-express@0.40.1(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-express@0.41.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) @@ -5553,7 +5685,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-fastify@0.37.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-fastify@0.38.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) @@ -5570,6 +5702,14 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/instrumentation-fs@0.14.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) + transitivePeerDependencies: + - 
supports-color + '@opentelemetry/instrumentation-generic-pool@0.36.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -5585,7 +5725,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-graphql@0.41.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-graphql@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5609,7 +5749,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-hapi@0.39.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-hapi@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) @@ -5647,7 +5787,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-ioredis@0.41.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-ioredis@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5675,14 +5815,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-koa@0.41.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-koa@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.1 - '@types/koa': 2.14.0 - '@types/koa__router': 12.0.3 transitivePeerDependencies: - supports-color @@ -5711,7 +5849,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mongodb@0.45.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-mongodb@0.46.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5729,7 +5867,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mongoose@0.39.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-mongoose@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) @@ -5747,7 +5885,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mysql2@0.39.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-mysql2@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5765,7 +5903,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mysql@0.39.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-mysql@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5782,7 +5920,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-nestjs-core@0.38.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-nestjs-core@0.39.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5809,7 +5947,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-pg@0.42.0(@opentelemetry/api@1.9.0)': + 
'@opentelemetry/instrumentation-pg@0.43.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5836,7 +5974,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-redis-4@0.40.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-redis-4@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -5911,11 +6049,11 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation@0.43.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation@0.46.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@types/shimmer': 1.0.5 - import-in-the-middle: 1.4.2 + import-in-the-middle: 1.7.1 require-in-the-middle: 7.3.0 semver: 7.6.2 shimmer: 1.2.1 @@ -5940,7 +6078,7 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.52.1 '@types/shimmer': 1.0.5 - import-in-the-middle: 1.8.1 + import-in-the-middle: 1.11.0 require-in-the-middle: 7.3.0 semver: 7.6.2 shimmer: 1.2.1 @@ -6130,7 +6268,7 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@prisma/instrumentation@5.16.0': + '@prisma/instrumentation@5.17.0': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) @@ -6213,62 +6351,126 @@ snapshots: domhandler: 5.0.3 selderee: 0.11.0 + '@sentry/cli-darwin@2.33.1': + optional: true + + '@sentry/cli-linux-arm64@2.33.1': + optional: true + + '@sentry/cli-linux-arm@2.33.1': + optional: true + + '@sentry/cli-linux-i686@2.33.1': + optional: true + + '@sentry/cli-linux-x64@2.33.1': + optional: true + + '@sentry/cli-win32-i686@2.33.1': + optional: true + + '@sentry/cli-win32-x64@2.33.1': + optional: true + + '@sentry/cli@2.33.1': + dependencies: + https-proxy-agent: 5.0.1 + node-fetch: 2.7.0 + progress: 2.0.3 + proxy-from-env: 1.1.0 + which: 2.0.2 + optionalDependencies: + '@sentry/cli-darwin': 2.33.1 + '@sentry/cli-linux-arm': 2.33.1 + '@sentry/cli-linux-arm64': 2.33.1 + '@sentry/cli-linux-i686': 2.33.1 + '@sentry/cli-linux-x64': 2.33.1 + '@sentry/cli-win32-i686': 2.33.1 + '@sentry/cli-win32-x64': 2.33.1 + transitivePeerDependencies: + - encoding + - supports-color + '@sentry/core@8.13.0': dependencies: '@sentry/types': 8.13.0 '@sentry/utils': 8.13.0 - '@sentry/node@8.13.0': + '@sentry/core@8.26.0': + dependencies: + '@sentry/types': 8.26.0 + '@sentry/utils': 8.26.0 + + '@sentry/node@8.26.0': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/context-async-hooks': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-connect': 0.37.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-express': 0.40.1(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-fastify': 0.37.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-graphql': 0.41.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-hapi': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-connect': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-express': 0.41.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-fastify': 0.38.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-fs': 0.14.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-graphql': 
0.42.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-hapi': 0.40.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-http': 0.52.1(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-ioredis': 0.41.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-koa': 0.41.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-mongodb': 0.45.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-mongoose': 0.39.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-mysql': 0.39.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-mysql2': 0.39.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-nestjs-core': 0.38.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-pg': 0.42.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-redis-4': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-ioredis': 0.42.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-koa': 0.42.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongodb': 0.46.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mongoose': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql2': 0.40.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-nestjs-core': 0.39.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-pg': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-redis-4': 0.41.0(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.1 - '@prisma/instrumentation': 5.16.0 - '@sentry/core': 8.13.0 - '@sentry/opentelemetry': 8.13.0(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.1) - '@sentry/types': 8.13.0 - '@sentry/utils': 8.13.0 + '@prisma/instrumentation': 5.17.0 + '@sentry/core': 8.26.0 + '@sentry/opentelemetry': 8.26.0(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.1) + '@sentry/types': 8.26.0 + '@sentry/utils': 8.26.0 + import-in-the-middle: 1.11.0 optionalDependencies: - opentelemetry-instrumentation-fetch-node: 1.2.0 + opentelemetry-instrumentation-fetch-node: 1.2.3(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@sentry/opentelemetry@8.13.0(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.1)': + '@sentry/opentelemetry@8.26.0(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.52.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.25.1)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) 
'@opentelemetry/semantic-conventions': 1.25.1 - '@sentry/core': 8.13.0 - '@sentry/types': 8.13.0 - '@sentry/utils': 8.13.0 + '@sentry/core': 8.26.0 + '@sentry/types': 8.26.0 + '@sentry/utils': 8.26.0 + + '@sentry/profiling-node@8.26.0': + dependencies: + '@sentry/core': 8.26.0 + '@sentry/node': 8.26.0 + '@sentry/types': 8.26.0 + '@sentry/utils': 8.26.0 + detect-libc: 2.0.3 + node-abi: 3.67.0 + transitivePeerDependencies: + - supports-color '@sentry/types@8.13.0': {} + '@sentry/types@8.26.0': {} + '@sentry/utils@8.13.0': dependencies: '@sentry/types': 8.13.0 + '@sentry/utils@8.26.0': + dependencies: + '@sentry/types': 8.26.0 + '@sinclair/typebox@0.27.8': {} '@sinonjs/commons@3.0.1': @@ -6381,8 +6583,8 @@ snapshots: dependencies: '@supabase/node-fetch': 2.6.15 '@types/phoenix': 1.6.5 - '@types/ws': 8.5.10 - ws: 8.17.1 + '@types/ws': 8.5.12 + ws: 8.18.0 transitivePeerDependencies: - bufferutil - utf-8-validate @@ -6447,12 +6649,6 @@ snapshots: '@types/connect': 3.4.38 '@types/node': 20.14.1 - '@types/bull@4.10.0': - dependencies: - bull: 4.15.0 - transitivePeerDependencies: - - supports-color - '@types/bunyan@1.8.9': dependencies: '@types/node': 20.14.1 @@ -6485,6 +6681,12 @@ snapshots: '@types/range-parser': 1.2.7 '@types/send': 0.17.4 + '@types/express-ws@3.0.4': + dependencies: + '@types/express': 4.17.21 + '@types/express-serve-static-core': 4.19.3 + '@types/ws': 8.5.12 + '@types/express@4.17.21': dependencies: '@types/body-parser': 1.19.5 @@ -6608,7 +6810,7 @@ snapshots: dependencies: '@types/webidl-conversions': 7.0.3 - '@types/ws@8.5.10': + '@types/ws@8.5.12': dependencies: '@types/node': 20.14.1 @@ -6655,6 +6857,12 @@ snapshots: afinn-165@1.0.4: {} + agent-base@6.0.2: + dependencies: + debug: 4.3.5 + transitivePeerDependencies: + - supports-color + agent-base@7.1.1: dependencies: debug: 4.3.5 @@ -6923,15 +7131,15 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bull@4.15.0: + bullmq@5.11.0: dependencies: cron-parser: 4.9.0 - get-port: 5.1.1 ioredis: 5.4.1 - lodash: 4.17.21 msgpackr: 1.10.2 + node-abort-controller: 3.1.1 semver: 7.6.2 - uuid: 8.3.2 + tslib: 2.6.3 + uuid: 9.0.1 transitivePeerDependencies: - supports-color @@ -7206,8 +7414,7 @@ snapshots: destroy@1.2.0: {} - detect-libc@2.0.3: - optional: true + detect-libc@2.0.3: {} detect-newline@3.1.0: {} @@ -7244,6 +7451,15 @@ snapshots: domelementtype: 2.3.0 domhandler: 5.0.3 + dotenv-cli@7.4.2: + dependencies: + cross-spawn: 7.0.3 + dotenv: 16.4.5 + dotenv-expand: 10.0.0 + minimist: 1.2.8 + + dotenv-expand@10.0.0: {} + dotenv@16.4.5: {} duck@0.1.12: @@ -7349,6 +7565,14 @@ snapshots: dependencies: express: 4.19.2 + express-ws@5.0.2(express@4.19.2): + dependencies: + express: 4.19.2 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + express@4.19.2: dependencies: accepts: 1.3.8 @@ -7532,8 +7756,6 @@ snapshots: get-package-type@0.1.0: {} - get-port@5.1.1: {} - get-stream@5.2.0: dependencies: pump: 3.0.0 @@ -7676,6 +7898,13 @@ snapshots: - debug - supports-color + https-proxy-agent@5.0.1: + dependencies: + agent-base: 6.0.2 + debug: 4.3.5 + transitivePeerDependencies: + - supports-color + https-proxy-agent@7.0.4: dependencies: agent-base: 7.1.1 @@ -7715,7 +7944,14 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 - import-in-the-middle@1.4.2: + import-in-the-middle@1.11.0: + dependencies: + acorn: 8.12.0 + acorn-import-attributes: 1.9.5(acorn@8.12.0) + cjs-module-lexer: 1.3.1 + module-details-from-path: 1.0.3 + + import-in-the-middle@1.7.1: dependencies: acorn: 8.12.0 acorn-import-assertions: 
1.9.0(acorn@8.12.0) @@ -7730,13 +7966,6 @@ snapshots: cjs-module-lexer: 1.3.1 module-details-from-path: 1.0.3 - import-in-the-middle@1.8.1: - dependencies: - acorn: 8.12.0 - acorn-import-attributes: 1.9.5(acorn@8.12.0) - cjs-module-lexer: 1.3.1 - module-details-from-path: 1.0.3 - import-local@3.1.0: dependencies: pkg-dir: 4.2.0 @@ -8263,17 +8492,17 @@ snapshots: kleur@3.0.3: {} - langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1): + langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0): dependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) - '@langchain/openai': 0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1)) - '@langchain/textsplitters': 0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) + '@langchain/openai': 0.2.1(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0)) + '@langchain/textsplitters': 0.0.3(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) binary-extensions: 2.3.0 js-tiktoken: 1.0.12 js-yaml: 4.1.0 jsonpointer: 5.0.1 langchainhub: 0.0.11 - langsmith: 
0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) + langsmith: 0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) ml-distance: 4.0.1 openapi-types: 12.1.3 p-retry: 4.6.2 @@ -8293,14 +8522,14 @@ snapshots: pdf-parse: 1.1.1 puppeteer: 22.12.1(typescript@5.4.5) redis: 4.6.14 - ws: 8.17.1 + ws: 8.18.0 transitivePeerDependencies: - encoding - openai langchainhub@0.0.11: {} - langsmith@0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2): + langsmith@0.1.34(@langchain/core@0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)))(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)): dependencies: '@types/uuid': 9.0.8 commander: 10.0.1 @@ -8309,9 +8538,9 @@ snapshots: p-retry: 4.6.2 uuid: 9.0.1 optionalDependencies: - '@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1))(openai@4.52.2) - langchain: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.52.2)(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.17.1) - openai: 4.52.2 + 
'@langchain/core': 0.2.12(langchain@0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0))(openai@4.57.0(zod@3.23.8)) + langchain: 0.2.8(@supabase/supabase-js@2.44.2)(axios@1.7.2)(cheerio@1.0.0-rc.12)(handlebars@4.7.8)(html-to-text@9.0.5)(ioredis@5.4.1)(mammoth@1.7.2)(mongodb@6.6.2(socks@2.8.3))(openai@4.57.0(zod@3.23.8))(pdf-parse@1.1.1)(puppeteer@22.12.1(typescript@5.4.5))(redis@4.6.14)(ws@8.18.0) + openai: 4.57.0(zod@3.23.8) languagedetect@2.0.0: {} @@ -8615,6 +8844,10 @@ snapshots: netmask@2.0.2: {} + node-abi@3.67.0: + dependencies: + semver: 7.6.2 + node-abort-controller@3.1.1: {} node-domexception@1.0.0: {} @@ -8700,16 +8933,19 @@ snapshots: transitivePeerDependencies: - debug - openai@4.52.2: + openai@4.57.0(zod@3.23.8): dependencies: '@types/node': 18.19.39 '@types/node-fetch': 2.6.11 + '@types/qs': 6.9.15 abort-controller: 3.0.0 agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 node-fetch: 2.7.0 - web-streams-polyfill: 3.3.3 + qs: 6.12.2 + optionalDependencies: + zod: 3.23.8 transitivePeerDependencies: - encoding @@ -8717,10 +8953,10 @@ snapshots: opener@1.5.2: {} - opentelemetry-instrumentation-fetch-node@1.2.0: + opentelemetry-instrumentation-fetch-node@1.2.3(@opentelemetry/api@1.9.0): dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.46.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.25.1 transitivePeerDependencies: - supports-color @@ -9014,7 +9250,7 @@ snapshots: chromium-bidi: 0.5.24(devtools-protocol@0.0.1299070) debug: 4.3.5 devtools-protocol: 0.0.1299070 - ws: 8.17.1 + ws: 8.18.0 transitivePeerDependencies: - bufferutil - supports-color @@ -9433,6 +9669,8 @@ snapshots: sylvester@0.0.12: {} + systeminformation@5.22.11: {} + tar-fs@3.0.5: dependencies: pump: 3.0.0 @@ -9605,8 +9843,6 @@ snapshots: uuid@10.0.0: {} - uuid@8.3.2: {} - uuid@9.0.1: {} v8-compile-cache-lib@3.0.1: {} @@ -9696,7 +9932,9 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 4.1.0 - ws@8.17.1: {} + ws@7.5.10: {} + + ws@8.18.0: {} xml2js@0.6.2: dependencies: diff --git a/apps/api/requests.http b/apps/api/requests.http index 3a1a9902..3e7bd2b7 100644 --- a/apps/api/requests.http +++ b/apps/api/requests.http @@ -1,12 +1,16 @@ ### Crawl Website POST http://localhost:3002/v0/scrape HTTP/1.1 -Authorization: Bearer fc +Authorization: Bearer fc- content-type: application/json { - "url":"firecrawl.dev" + "url":"corterix.com" } +### Check Job Status +GET http://localhost:3002/v1/crawl/1dd0f924-a36f-4b96-94ea-32ed954dac67 HTTP/1.1 +Authorization: Bearer fc- + ### Check Job Status GET http://localhost:3002/v0/jobs/active HTTP/1.1 diff --git a/apps/api/src/__tests__/e2e_full_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_full_withAuth/index.test.ts index 019bc968..b1708abc 100644 --- a/apps/api/src/__tests__/e2e_full_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_full_withAuth/index.test.ts @@ -404,7 +404,7 @@ describe("E2E Tests for API Routes", () => { .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") .set("x-idempotency-key", uniqueIdempotencyKey) - .send({ url: 'https://mendable.ai' }); + .send({ url: 'https://docs.firecrawl.dev' }); expect(firstResponse.statusCode).toBe(200); @@ -414,7 +414,7 @@ 
describe("E2E Tests for API Routes", () => { .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") .set("x-idempotency-key", uniqueIdempotencyKey) - .send({ url: 'https://mendable.ai' }); + .send({ url: 'https://docs.firecrawl.dev' }); expect(secondResponse.statusCode).toBe(409); expect(secondResponse.body.error).toBe('Idempotency key already used'); diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts new file mode 100644 index 00000000..dd7d4f16 --- /dev/null +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -0,0 +1,951 @@ +import request from "supertest"; +import dotenv from "dotenv"; +import { + ScrapeRequest, + ScrapeResponseRequestTest, +} from "../../controllers/v1/types"; + +dotenv.config(); +const TEST_URL = "http://127.0.0.1:3002"; + +describe("E2E Tests for v1 API Routes", () => { + beforeAll(() => { + process.env.USE_DB_AUTHENTICATION = "true"; + }); + + afterAll(() => { + delete process.env.USE_DB_AUTHENTICATION; + }); + + describe("GET /is-production", () => { + it.concurrent("should return the production status", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).get( + "/is-production" + ); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("isProduction"); + }); + }); + + describe("POST /v1/scrape", () => { + it.concurrent("should require authorization", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).post( + "/v1/scrape" + ); + expect(response.statusCode).toBe(401); + }); + + it.concurrent("should throw error for blocklisted URL", async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://facebook.com/fake-test", + }; + + const response = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(403); + expect(response.body.error).toBe("URL is blocked. 
Firecrawl currently does not support social media scraping due to policy restrictions."); + }); + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return a successful response with a valid API key", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.metadata.error).toBeUndefined(); + expect(response.body.data.metadata.title).toBe("Roast My Website"); + expect(response.body.data.metadata.description).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️" + ); + expect(response.body.data.metadata.keywords).toBe( + "Roast My Website,Roast,Website,GitHub,Firecrawl" + ); + expect(response.body.data.metadata.robots).toBe("follow, index"); + expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); + expect(response.body.data.metadata.ogDescription).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 
🌶️" + ); + expect(response.body.data.metadata.ogUrl).toBe( + "https://www.roastmywebsite.ai" + ); + expect(response.body.data.metadata.ogImage).toBe( + "https://www.roastmywebsite.ai/og.png" + ); + expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); + expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); + expect(response.body.data.metadata.sourceURL).toBe( + "https://roastmywebsite.ai" + ); + expect(response.body.data.metadata.statusCode).toBe(200); + }, + 30000 + ); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key and includeHtml set to true", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["markdown", "html"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.html).toContain(" { + const scrapeRequest: ScrapeRequest = { + url: "https://arxiv.org/pdf/astro-ph/9301001.pdf" + // formats: ["markdown", "html"], + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send(scrapeRequest); + await new Promise((r) => setTimeout(r, 6000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.markdown).toContain('Broad Line Radio Galaxy'); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, 60000); + + it.concurrent('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://arxiv.org/pdf/astro-ph/9301001" + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send(scrapeRequest); + await new Promise((r) => setTimeout(r, 6000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.markdown).toContain('Broad Line Radio Galaxy'); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, 60000); + + it.concurrent("should return a successful response with a valid API key with removeTags option", async () => { + const scrapeRequest: ScrapeRequest = { + url: 
"https://www.scrapethissite.com/", + onlyMainContent: false // default is true + }; + const responseWithoutRemoveTags: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + expect(responseWithoutRemoveTags.statusCode).toBe(200); + expect(responseWithoutRemoveTags.body).toHaveProperty("data"); + + if (!("data" in responseWithoutRemoveTags.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); + expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); + expect(responseWithoutRemoveTags.body.data.markdown).toContain("[FAQ](/faq/)"); // .nav + expect(responseWithoutRemoveTags.body.data.markdown).toContain("Hartley Brody 2023"); // #footer + + const scrapeRequestWithRemoveTags: ScrapeRequest = { + url: "https://www.scrapethissite.com/", + excludeTags: ['.nav', '#footer', 'strong'], + onlyMainContent: false // default is true + }; + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequestWithRemoveTags); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.markdown).not.toContain("Hartley Brody 2023"); + expect(response.body.data.markdown).not.toContain("[FAQ](/faq/)"); // + }, 30000); + + it.concurrent('should return a successful response for a scrape with 400 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/400' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(400); + }, 60000); + + + it.concurrent('should return a successful response for a scrape with 401 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/401' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(401); + }, 60000); + + it.concurrent('should return a successful response 
for a scrape with 403 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/403' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(403); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 404 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/404' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(404); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 405 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/405' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(405); + }, 60000); + + it.concurrent('should return a successful response for a scrape with 500 page', async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post('/v1/scrape') + .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) + .set('Content-Type', 'application/json') + .send({ url: 'https://httpstat.us/500' }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty('data'); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty('markdown'); + expect(response.body.data).toHaveProperty('metadata'); + expect(response.body.data.metadata.statusCode).toBe(500); + }, 60000); + + it.concurrent("should return a timeout error when scraping takes longer than the specified timeout", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev", timeout: 1000 }); + + expect(response.statusCode).toBe(408); + }, 3000); + + it.concurrent( + "should return a successful response with a valid API 
key and includeHtml set to true", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["html","rawHtml"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.html).toContain(" { + const scrapeRequest: ScrapeRequest = { + url: "https://ycombinator.com/companies", + formats: ["markdown"], + waitFor: 5000 + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data).not.toHaveProperty("links"); + expect(response.body.data).not.toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.markdown).toContain("PagerDuty"); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + + }, + 30000 + ); + + it.concurrent( + "should return a successful response with a valid links on page", + async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://roastmywebsite.ai", + formats: ["links"], + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + if (!("data" in response.body)) { + throw new Error("Expected response body to have 'data' property"); + } + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data).not.toHaveProperty("rawHtml"); + expect(response.body.data).toHaveProperty("links"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.links).toContain("https://firecrawl.dev"); + expect(response.body.data.metadata.statusCode).toBe(200); + expect(response.body.data.metadata.error).toBeUndefined(); + }, + 30000 + ); + + + }); + +describe("POST /v1/map", () => { + it.concurrent("should require authorization", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).post( + "/v1/map" + ); + expect(response.statusCode).toBe(401); + }); + + it.concurrent("should return an error response with an invalid API key", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + 
expect(response.statusCode).toBe(401); + }); + + it.concurrent("should return a successful response with a valid API key", async () => { + const mapRequest = { + url: "https://roastmywebsite.ai" + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + }); + + it.concurrent("should return a successful response with a valid API key and search", async () => { + const mapRequest = { + url: "https://usemotion.com", + search: "pricing" + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("usemotion.com/pricing"); + }); + + it.concurrent("should return a successful response with a valid API key and search and allowSubdomains", async () => { + const mapRequest = { + url: "https://firecrawl.dev", + search: "docs", + includeSubdomains: true + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("docs.firecrawl.dev"); + }); + + it.concurrent("should return a successful response with a valid API key and search and allowSubdomains and www", async () => { + const mapRequest = { + url: "https://www.firecrawl.dev", + search: "docs", + includeSubdomains: true + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).toContain("docs.firecrawl.dev"); + }, 10000) + + it.concurrent("should return a successful response with a 
valid API key and search and not allowSubdomains and www", async () => { + const mapRequest = { + url: "https://www.firecrawl.dev", + search: "docs", + includeSubdomains: false + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("links"); + if (!("links" in response.body)) { + throw new Error("Expected response body to have 'links' property"); + } + const links = response.body.links as unknown[]; + expect(Array.isArray(links)).toBe(true); + expect(links.length).toBeGreaterThan(0); + expect(links[0]).not.toContain("docs.firecrawl.dev"); + }) + + it.concurrent("should return an error for invalid URL", async () => { + const mapRequest = { + url: "invalid-url", + includeSubdomains: true, + search: "test", + }; + + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/map") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(mapRequest); + + expect(response.statusCode).toBe(400); + expect(response.body).toHaveProperty("success", false); + expect(response.body).toHaveProperty("error"); + }); +}); + + +describe("POST /v1/crawl", () => { + it.concurrent("should require authorization", async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL).post( + "/v1/crawl" + ); + expect(response.statusCode).toBe(401); + }); + + it.concurrent("should throw error for blocklisted URL", async () => { + const scrapeRequest: ScrapeRequest = { + url: "https://facebook.com/fake-test", + }; + + const response = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); + + expect(response.statusCode).toBe(403); + expect(response.body.error).toBe("URL is blocked. 
Firecrawl currently does not support social media scraping due to policy restrictions."); + }); + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: ScrapeResponseRequestTest = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent("should return a successful response", async () => { + const response = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("id"); + expect(response.body.id).toMatch( + /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + ); + expect(response.body).toHaveProperty("success", true); + expect(response.body).toHaveProperty("url"); + expect(response.body.url).toContain("/v1/crawl/"); + }); + + it.concurrent( + "should return a successful response with a valid API key and valid includes option", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://firecrawl.dev", + limit: 10, + includePaths: ["blog/*"], + }); + + let response; + let isFinished = false; + + while (!isFinished) { + response = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + isFinished = response.body.status === "completed"; + + if (!isFinished) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + const urls = completedResponse.body.data.map( + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(5); + urls.forEach((url: string) => { + expect(url).toContain("firecrawl.dev/blog"); + }); + + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0]).not.toHaveProperty("content"); // v0 + expect(completedResponse.body.data[0].metadata.statusCode).toBe(200); + expect(completedResponse.body.data[0].metadata.error).toBeUndefined(); + }, + 180000 + ); // 180 seconds + + it.concurrent( + "should return a successful response with a valid API key and valid excludes option", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://firecrawl.dev", + limit: 10, + excludePaths: ["blog/*"], + }); + + let isFinished = false; + 
let response; + + while (!isFinished) { + response = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + isFinished = response.body.status === "completed"; + + if (!isFinished) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request( + TEST_URL + ) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + const urls = completedResponse.body.data.map( + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(3); + urls.forEach((url: string) => { + expect(url.startsWith("https://www.firecrawl.dev/blog/")).toBeFalsy(); + }); + }, + 90000 + ); // 90 seconds + + it.concurrent( + "should return a successful response with max depth option for a valid crawl job", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://www.scrapethissite.com", + maxDepth: 1, + }); + expect(crawlResponse.statusCode).toBe(200); + + const response = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + expect(["active", "waiting", "completed", "scraping"]).toContain(response.body.status); + // wait for 60 seconds + let isCompleted = false; + while (!isCompleted) { + const statusCheckResponse = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(statusCheckResponse.statusCode).toBe(200); + isCompleted = statusCheckResponse.body.status === "completed"; + if (!isCompleted) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } + } + const completedResponse = await request( + TEST_URL + ) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).not.toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].metadata.statusCode).toBe(200); + expect(completedResponse.body.data[0].metadata.error).toBeUndefined(); + const urls = completedResponse.body.data.map( + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThanOrEqual(1); + + // Check if all URLs have a maximum depth of 1 + urls.forEach((url: string) => { + const pathSplits = new URL(url).pathname.split("/"); + const depth = + pathSplits.length - + (pathSplits[0].length === 0 && + pathSplits[pathSplits.length - 1].length === 0 + ? 
1 + : 0); + expect(depth).toBeLessThanOrEqual(2); + }); + }, + 180000 + ); +}) + +describe("GET /v1/crawl/:jobId", () => { + it.concurrent("should require authorization", async () => { + const response = await request(TEST_URL).get("/v1/crawl/123"); + expect(response.statusCode).toBe(401); + }); + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response = await request(TEST_URL) + .get("/v1/crawl/123") + .set("Authorization", `Bearer invalid-api-key`); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return Job not found for invalid job ID", + async () => { + const response = await request(TEST_URL) + .get("/v1/crawl/invalidJobId") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(404); + } + ); + + it.concurrent( + "should return a successful crawl status response for a valid crawl job", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://docs.mendable.ai" }); + expect(crawlResponse.statusCode).toBe(200); + + let isCompleted = false; + + while (!isCompleted) { + const response = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + + if (response.body.status === "completed") { + isCompleted = true; + } else { + await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again + } + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).not.toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].metadata.statusCode).toBe( + 200 + ); + expect( + completedResponse.body.data[0].metadata.error + ).toBeUndefined(); + + const childrenLinks = completedResponse.body.data.filter( + (doc) => + doc.metadata && + doc.metadata.sourceURL + ); + + expect(childrenLinks.length).toBe(completedResponse.body.data.length); + }, + 180000 + ); // 120 seconds + + it.concurrent( + "If someone cancels a crawl job, it should turn into failed status", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v1/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://docs.tatum.io", limit: 200 }); + + expect(crawlResponse.statusCode).toBe(200); + + await new Promise((r) => setTimeout(r, 10000)); + + const responseCancel = await request(TEST_URL) + .delete(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(responseCancel.statusCode).toBe(200); + expect(responseCancel.body).toHaveProperty("status"); + expect(responseCancel.body.status).toBe("cancelled"); + + await new Promise((r) => setTimeout(r, 10000)); + const 
completedResponse = await request(TEST_URL) + .get(`/v1/crawl/${crawlResponse.body.id}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("cancelled"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].metadata.statusCode).toBe(200); + expect(completedResponse.body.data[0].metadata.error).toBeUndefined(); + }, + 60000 + ); // 60 seconds +}) +}); diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index b5bc54a5..330f8130 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -1,11 +1,15 @@ import request from "supertest"; import dotenv from "dotenv"; -import { FirecrawlCrawlResponse, FirecrawlCrawlStatusResponse, FirecrawlScrapeResponse } from "../../types"; +import { + FirecrawlCrawlResponse, + FirecrawlCrawlStatusResponse, + FirecrawlScrapeResponse, +} from "../../types"; dotenv.config(); const TEST_URL = "http://127.0.0.1:3002"; -describe("E2E Tests for API Routes", () => { +describe("E2E Tests for v0 API Routes", () => { beforeAll(() => { process.env.USE_DB_AUTHENTICATION = "true"; }); @@ -24,276 +28,365 @@ describe("E2E Tests for API Routes", () => { describe("POST /v0/scrape", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL).post("/v0/scrape"); + const response: FirecrawlScrapeResponse = await request(TEST_URL).post( + "/v0/scrape" + ); expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a successful response with a valid API key", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://roastmywebsite.ai" }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data).not.toHaveProperty("html"); - expect(response.body.data.content).toContain("_Roast_"); - expect(response.body.data.metadata.pageError).toBeUndefined(); - expect(response.body.data.metadata.title).toBe("Roast My Website"); - expect(response.body.data.metadata.description).toBe("Welcome to Roast My Website, the ultimate tool 
for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"); - expect(response.body.data.metadata.keywords).toBe("Roast My Website,Roast,Website,GitHub,Firecrawl"); - expect(response.body.data.metadata.robots).toBe("follow, index"); - expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); - expect(response.body.data.metadata.ogDescription).toBe("Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️"); - expect(response.body.data.metadata.ogUrl).toBe("https://www.roastmywebsite.ai"); - expect(response.body.data.metadata.ogImage).toBe("https://www.roastmywebsite.ai/og.png"); - expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); - expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); - expect(response.body.data.metadata.sourceURL).toBe("https://roastmywebsite.ai"); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://roastmywebsite.ai" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.content).toContain("_Roast_"); + expect(response.body.data.metadata.pageError).toBeUndefined(); + expect(response.body.data.metadata.title).toBe("Roast My Website"); + expect(response.body.data.metadata.description).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️" + ); + expect(response.body.data.metadata.keywords).toBe( + "Roast My Website,Roast,Website,GitHub,Firecrawl" + ); + expect(response.body.data.metadata.robots).toBe("follow, index"); + expect(response.body.data.metadata.ogTitle).toBe("Roast My Website"); + expect(response.body.data.metadata.ogDescription).toBe( + "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 
🌶️" + ); + expect(response.body.data.metadata.ogUrl).toBe( + "https://www.roastmywebsite.ai" + ); + expect(response.body.data.metadata.ogImage).toBe( + "https://www.roastmywebsite.ai/og.png" + ); + expect(response.body.data.metadata.ogLocaleAlternate).toStrictEqual([]); + expect(response.body.data.metadata.ogSiteName).toBe("Roast My Website"); + expect(response.body.data.metadata.sourceURL).toBe( + "https://roastmywebsite.ai" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + }, + 30000 + ); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key and includeHtml set to true", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://roastmywebsite.ai", + pageOptions: { includeHtml: true }, + }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("html"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain("_Roast_"); + expect(response.body.data.markdown).toContain("_Roast_"); + expect(response.body.data.html).toContain(" { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://roastmywebsite.ai", - pageOptions: { includeHtml: true }, - }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("html"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data.content).toContain("_Roast_"); - expect(response.body.data.markdown).toContain("_Roast_"); - expect(response.body.data.html).toContain(" { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001.pdf' }); - await new Promise((r) => setTimeout(r, 6000)); + it.concurrent( + "should return a successful response for a valid scrape with PDF file", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://arxiv.org/pdf/astro-ph/9301001.pdf" }); + await new Promise((r) => setTimeout(r, 6000)); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - expect(response.body.data.metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds - - it.concurrent('should return a successful response for a valid scrape with PDF file without explicit .pdf extension', async () => { - const response: 
FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://arxiv.org/pdf/astro-ph/9301001' }); - await new Promise((r) => setTimeout(r, 6000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + expect(response.body.data.metadata.pageError).toBeUndefined(); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - expect(response.body.data.metadata.pageStatusCode).toBe(200); - expect(response.body.data.metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a valid scrape with PDF file without explicit .pdf extension", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://arxiv.org/pdf/astro-ph/9301001" }); + await new Promise((r) => setTimeout(r, 6000)); - it.concurrent("should return a successful response with a valid API key with removeTags option", async () => { - const responseWithoutRemoveTags: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://www.scrapethissite.com/" }); - expect(responseWithoutRemoveTags.statusCode).toBe(200); - expect(responseWithoutRemoveTags.body).toHaveProperty("data"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); - expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); - expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); - expect(responseWithoutRemoveTags.body.data.content).toContain("Scrape This Site"); - expect(responseWithoutRemoveTags.body.data.content).toContain("Lessons and Videos"); // #footer - expect(responseWithoutRemoveTags.body.data.content).toContain("[Sandbox]("); // .nav - expect(responseWithoutRemoveTags.body.data.content).toContain("web scraping"); // strong + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + expect(response.body.data.metadata.pageStatusCode).toBe(200); + expect(response.body.data.metadata.pageError).toBeUndefined(); + }, + 60000 + ); // 60 seconds - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - 
.send({ url: "https://www.scrapethissite.com/", pageOptions: { removeTags: ['.nav', '#footer', 'strong'] } }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("data"); - expect(response.body.data).toHaveProperty("content"); - expect(response.body.data).toHaveProperty("markdown"); - expect(response.body.data).toHaveProperty("metadata"); - expect(response.body.data).not.toHaveProperty("html"); - expect(response.body.data.content).toContain("Scrape This Site"); - expect(response.body.data.content).not.toContain("Lessons and Videos"); // #footer - expect(response.body.data.content).not.toContain("[Sandbox]("); // .nav - expect(response.body.data.content).not.toContain("web scraping"); // strong - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key with removeTags option", + async () => { + const responseWithoutRemoveTags: FirecrawlScrapeResponse = + await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://www.scrapethissite.com/" }); + expect(responseWithoutRemoveTags.statusCode).toBe(200); + expect(responseWithoutRemoveTags.body).toHaveProperty("data"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("markdown"); + expect(responseWithoutRemoveTags.body.data).toHaveProperty("metadata"); + expect(responseWithoutRemoveTags.body.data).not.toHaveProperty("html"); + expect(responseWithoutRemoveTags.body.data.content).toContain( + "Scrape This Site" + ); + expect(responseWithoutRemoveTags.body.data.content).toContain( + "Lessons and Videos" + ); // #footer + expect(responseWithoutRemoveTags.body.data.content).toContain( + "[Sandbox](" + ); // .nav + expect(responseWithoutRemoveTags.body.data.content).toContain( + "web scraping" + ); // strong - it.concurrent('should return a successful response for a scrape with 400 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/400' }); - await new Promise((r) => setTimeout(r, 5000)); + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://www.scrapethissite.com/", + pageOptions: { removeTags: [".nav", "#footer", "strong"] }, + }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("markdown"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data).not.toHaveProperty("html"); + expect(response.body.data.content).toContain("Scrape This Site"); + expect(response.body.data.content).not.toContain("Lessons and Videos"); // #footer + expect(response.body.data.content).not.toContain("[Sandbox]("); // .nav + expect(response.body.data.content).not.toContain("web scraping"); // strong + }, + 30000 + ); // 30 seconds timeout - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - 
expect(response.body.data.metadata.pageStatusCode).toBe(400); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("bad request"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 400 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/400" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 401 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/401' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(400); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "bad request" + ); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(401); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("unauthorized"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 401 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/401" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent("should return a successful response for a scrape with 403 page", async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/403' }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(401); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "unauthorized" + ); + }, + 60000 + ); // 60 seconds - await new Promise((r) => setTimeout(r, 5000)); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(403); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("forbidden"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 403 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", 
"application/json") + .send({ url: "https://httpstat.us/403" }); - it.concurrent('should return a successful response for a scrape with 404 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/404' }); - await new Promise((r) => setTimeout(r, 5000)); + await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(403); + expect(response.body.data.metadata.pageError.toLowerCase()).toContain( + "forbidden" + ); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(404); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("not found"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 404 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/404" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 405 page', async () => { - const response = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/405' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(404); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(405); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("method not allowed"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 405 page", + async () => { + const response = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/405" }); + await new Promise((r) => setTimeout(r, 5000)); - it.concurrent('should return a successful response for a scrape with 500 page', async () => { - const response: FirecrawlScrapeResponse = await request(TEST_URL) - .post('/v0/scrape') - .set('Authorization', `Bearer ${process.env.TEST_API_KEY}`) - .set('Content-Type', 'application/json') - .send({ url: 'https://httpstat.us/500' }); - await new Promise((r) => setTimeout(r, 5000)); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + 
expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(405); + }, + 60000 + ); // 60 seconds - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty('data'); - expect(response.body.data).toHaveProperty('content'); - expect(response.body.data).toHaveProperty('metadata'); - expect(response.body.data.metadata.pageStatusCode).toBe(500); - expect(response.body.data.metadata.pageError.toLowerCase()).toContain("internal server error"); - }, 60000); // 60 seconds + it.concurrent( + "should return a successful response for a scrape with 500 page", + async () => { + const response: FirecrawlScrapeResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://httpstat.us/500" }); + await new Promise((r) => setTimeout(r, 5000)); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("data"); + expect(response.body.data).toHaveProperty("content"); + expect(response.body.data).toHaveProperty("metadata"); + expect(response.body.data.metadata.pageStatusCode).toBe(500); + }, + 60000 + ); // 60 seconds }); describe("POST /v0/crawl", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawl"); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return a successful response with a valid API key for crawl", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("jobId"); - expect(response.body.jobId).toMatch( - /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + const response: FirecrawlCrawlResponse = await request(TEST_URL).post( + "/v0/crawl" ); + expect(response.statusCode).toBe(401); }); - - it.concurrent("should return a successful response with a valid API key and valid includes option", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - limit: 10, - crawlerOptions: { - includes: ["blog/*"], - }, - }); - + + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return a successful response with a valid API key for crawl", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + 
.post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("jobId"); + expect(response.body.jobId).toMatch( + /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + ); + } + ); + + it.concurrent( + "should return a successful response with a valid API key and valid includes option", + async () => { + const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + limit: 10, + crawlerOptions: { + includes: ["blog/*"], + }, + }); + let response: FirecrawlCrawlStatusResponse; let isFinished = false; @@ -311,149 +404,191 @@ describe("E2E Tests for API Routes", () => { } } - const completedResponse = response; + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(5); - urls.forEach((url: string) => { - expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy(); - }); - - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("completed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data[0]).toHaveProperty("content"); - expect(completedResponse.body.data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].content).toContain("Mendable"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); - }, 180000); // 180 seconds - - it.concurrent("should return a successful response with a valid API key and valid excludes option", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - limit: 10, - crawlerOptions: { - excludes: ["blog/*"], - }, + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(5); + urls.forEach((url: string) => { + expect(url.startsWith("https://www.mendable.ai/blog/")).toBeTruthy(); }); - - let isFinished = false; - let response: FirecrawlCrawlStatusResponse; - while (!isFinished) { - response = await request(TEST_URL) + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].content).toContain("Mendable"); + expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe( + 
200 + ); + expect( + completedResponse.body.data[0].metadata.pageError + ).toBeUndefined(); + }, + 180000 + ); // 180 seconds + + it.concurrent( + "should return a successful response with a valid API key and valid excludes option", + async () => { + const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + limit: 10, + crawlerOptions: { + excludes: ["blog/*"], + }, + }); + + let isFinished = false; + let response: FirecrawlCrawlStatusResponse; + + while (!isFinished) { + response = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + isFinished = response.body.status === "completed"; + + if (!isFinished) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse: FirecrawlCrawlStatusResponse = await request( + TEST_URL + ) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + const urls = completedResponse.body.data.map( + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThan(5); + urls.forEach((url: string) => { + expect(url.startsWith("https://wwww.mendable.ai/blog/")).toBeFalsy(); + }); + }, + 90000 + ); // 90 seconds + + it.concurrent( + "should return a successful response with max depth option for a valid crawl job", + async () => { + const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://www.scrapethissite.com", + crawlerOptions: { maxDepth: 1 }, + }); + expect(crawlResponse.statusCode).toBe(200); + + const response: FirecrawlCrawlStatusResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); expect(response.statusCode).toBe(200); expect(response.body).toHaveProperty("status"); - isFinished = response.body.status === "completed"; - - if (!isFinished) { - await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + expect(["active", "waiting"]).toContain(response.body.status); + // wait for 60 seconds + let isCompleted = false; + while (!isCompleted) { + const statusCheckResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(statusCheckResponse.statusCode).toBe(200); + isCompleted = statusCheckResponse.body.status === "completed"; + if (!isCompleted) { + await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again + } } - } - - const completedResponse: FirecrawlCrawlStatusResponse = response; - - const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(5); - urls.forEach((url: string) => { - expect(url.startsWith("https://wwww.mendable.ai/blog/")).toBeFalsy(); - }); - }, 90000); // 90 seconds - - it.concurrent("should return a 
successful response with max depth option for a valid crawl job", async () => { - const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://www.scrapethissite.com", - crawlerOptions: { maxDepth: 1 }, - }); - expect(crawlResponse.statusCode).toBe(200); - - const response: FirecrawlCrawlStatusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("status"); - expect(["active", "waiting"]).toContain(response.body.status); - // wait for 60 seconds - let isCompleted = false; - while (!isCompleted) { - const statusCheckResponse = await request(TEST_URL) + const completedResponse: FirecrawlCrawlStatusResponse = await request( + TEST_URL + ) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(statusCheckResponse.statusCode).toBe(200); - isCompleted = statusCheckResponse.body.status === "completed"; - if (!isCompleted) { - await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again - } - } - const completedResponse: FirecrawlCrawlStatusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("completed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data[0]).toHaveProperty("content"); - expect(completedResponse.body.data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); - const urls = completedResponse.body.data.map( - (item: any) => item.metadata?.sourceURL - ); - expect(urls.length).toBeGreaterThan(1); + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe( + 200 + ); + expect( + completedResponse.body.data[0].metadata.pageError + ).toBeUndefined(); + const urls = completedResponse.body.data.map( + (item: any) => item.metadata?.sourceURL + ); + expect(urls.length).toBeGreaterThanOrEqual(1); - // Check if all URLs have a maximum depth of 1 - urls.forEach((url: string) => { - const pathSplits = new URL(url).pathname.split('/'); - const depth = pathSplits.length - (pathSplits[0].length === 0 && pathSplits[pathSplits.length - 1].length === 0 ? 
1 : 0); - expect(depth).toBeLessThanOrEqual(2); - }); - }, 180000); + // Check if all URLs have a maximum depth of 1 + urls.forEach((url: string) => { + const pathSplits = new URL(url).pathname.split("/"); + const depth = + pathSplits.length - + (pathSplits[0].length === 0 && + pathSplits[pathSplits.length - 1].length === 0 + ? 1 + : 0); + expect(depth).toBeLessThanOrEqual(2); + }); + }, + 180000 + ); }); describe("POST /v0/crawlWebsitePreview", () => { it.concurrent("should require authorization", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL).post("/v0/crawlWebsitePreview"); + const response: FirecrawlCrawlResponse = await request(TEST_URL).post( + "/v0/crawlWebsitePreview" + ); expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/crawlWebsitePreview") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/crawlWebsitePreview") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a timeout error when scraping takes longer than the specified timeout", async () => { - const response: FirecrawlCrawlResponse = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://firecrawl.dev", timeout: 1000 }); + it.concurrent( + "should return a timeout error when scraping takes longer than the specified timeout", + async () => { + const response: FirecrawlCrawlResponse = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://firecrawl.dev", timeout: 1000 }); - expect(response.statusCode).toBe(408); - }, 3000); + expect(response.statusCode).toBe(408); + }, + 3000 + ); }); describe("POST /v0/search", () => { @@ -462,26 +597,33 @@ describe("E2E Tests for API Routes", () => { expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response = await request(TEST_URL) - .post("/v0/search") - .set("Authorization", `Bearer invalid-api-key`) - .set("Content-Type", "application/json") - .send({ query: "test" }); - expect(response.statusCode).toBe(401); - }); + it.concurrent( + "should return an error response with an invalid API key", + async () => { + const response = await request(TEST_URL) + .post("/v0/search") + .set("Authorization", `Bearer invalid-api-key`) + .set("Content-Type", "application/json") + .send({ query: "test" }); + expect(response.statusCode).toBe(401); + } + ); - it.concurrent("should return a successful response with a valid API key for search", async () => { - const response = await request(TEST_URL) - .post("/v0/search") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ query: "test" }); - expect(response.statusCode).toBe(200); - 
expect(response.body).toHaveProperty("success"); - expect(response.body.success).toBe(true); - expect(response.body).toHaveProperty("data"); - }, 30000); // 30 seconds timeout + it.concurrent( + "should return a successful response with a valid API key for search", + async () => { + const response = await request(TEST_URL) + .post("/v0/search") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ query: "test" }); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("success"); + expect(response.body.success).toBe(true); + expect(response.body).toHaveProperty("data"); + }, + 60000 + ); // 60 seconds timeout }); describe("GET /v0/crawl/status/:jobId", () => { @@ -490,62 +632,83 @@ describe("E2E Tests for API Routes", () => { expect(response.statusCode).toBe(401); }); - it.concurrent("should return an error response with an invalid API key", async () => { - const response = await request(TEST_URL) - .get("/v0/crawl/status/123") - .set("Authorization", `Bearer invalid-api-key`); - expect(response.statusCode).toBe(401); - }); - - it.concurrent("should return Job not found for invalid job ID", async () => { - const response = await request(TEST_URL) - .get("/v0/crawl/status/invalidJobId") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(response.statusCode).toBe(404); - }); - - it.concurrent("should return a successful crawl status response for a valid crawl job", async () => { - const crawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://mendable.ai/blog" }); - expect(crawlResponse.statusCode).toBe(200); - - let isCompleted = false; - let completedResponse; - - while (!isCompleted) { + it.concurrent( + "should return an error response with an invalid API key", + async () => { const response = await request(TEST_URL) + .get("/v0/crawl/status/123") + .set("Authorization", `Bearer invalid-api-key`); + expect(response.statusCode).toBe(401); + } + ); + + it.concurrent( + "should return Job not found for invalid job ID", + async () => { + const response = await request(TEST_URL) + .get("/v0/crawl/status/invalidJobId") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(404); + } + ); + + it.concurrent( + "should return a successful crawl status response for a valid crawl job", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://mendable.ai/blog" }); + expect(crawlResponse.statusCode).toBe(200); + + let isCompleted = false; + + while (!isCompleted) { + const response = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(response.statusCode).toBe(200); + expect(response.body).toHaveProperty("status"); + + if (response.body.status === "completed") { + isCompleted = true; + } else { + await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again + } + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database + const completedResponse = await request(TEST_URL) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - 
expect(response.statusCode).toBe(200); - expect(response.body).toHaveProperty("status"); - if (response.body.status === "completed") { - isCompleted = true; - completedResponse = response; - } else { - await new Promise((r) => setTimeout(r, 1000)); // Wait for 1 second before checking again - } - } - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("completed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data[0]).toHaveProperty("content"); - expect(completedResponse.body.data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.data[0].content).toContain("Mendable"); - expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("completed"); + expect(completedResponse.body).toHaveProperty("data"); + expect(completedResponse.body.data[0]).toHaveProperty("content"); + expect(completedResponse.body.data[0]).toHaveProperty("markdown"); + expect(completedResponse.body.data[0]).toHaveProperty("metadata"); + expect(completedResponse.body.data[0].content).toContain("Mendable"); + expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe( + 200 + ); + expect( + completedResponse.body.data[0].metadata.pageError + ).toBeUndefined(); - const childrenLinks = completedResponse.body.data.filter(doc => - doc.metadata && doc.metadata.sourceURL && doc.metadata.sourceURL.includes("mendable.ai/blog") - ); + const childrenLinks = completedResponse.body.data.filter( + (doc) => + doc.metadata && + doc.metadata.sourceURL && + doc.metadata.sourceURL.includes("mendable.ai/blog") + ); + + expect(childrenLinks.length).toBe(completedResponse.body.data.length); + }, + 180000 + ); // 120 seconds - expect(childrenLinks.length).toBe(completedResponse.body.data.length); - }, 180000); // 120 seconds - // TODO: review the test below // it.concurrent('should return a successful response for a valid crawl job with PDF files without explicit .pdf extension ', async () => { // const crawlResponse = await request(TEST_URL) @@ -592,148 +755,118 @@ describe("E2E Tests for API Routes", () => { // expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); // }, 180000); // 120 seconds - it.concurrent("If someone cancels a crawl job, it should turn into failed status", async () => { - const crawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://jestjs.io" }); + it.concurrent( + "If someone cancels a crawl job, it should turn into failed status", + async () => { + const crawlResponse = await request(TEST_URL) + .post("/v0/crawl") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://docs.tatum.io", crawlerOptions: { limit: 200 } }); - expect(crawlResponse.statusCode).toBe(200); + expect(crawlResponse.statusCode).toBe(200); - await new Promise((r) => setTimeout(r, 20000)); + await new Promise((r) => setTimeout(r, 10000)); - const responseCancel = await request(TEST_URL) - .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(responseCancel.statusCode).toBe(200); - 
expect(responseCancel.body).toHaveProperty("status"); - expect(responseCancel.body.status).toBe("cancelled"); + const responseCancel = await request(TEST_URL) + .delete(`/v0/crawl/cancel/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + expect(responseCancel.statusCode).toBe(200); + expect(responseCancel.body).toHaveProperty("status"); + expect(responseCancel.body.status).toBe("cancelled"); - await new Promise((r) => setTimeout(r, 10000)); - const completedResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); + await new Promise((r) => setTimeout(r, 10000)); + const completedResponse = await request(TEST_URL) + .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - expect(completedResponse.statusCode).toBe(200); - expect(completedResponse.body).toHaveProperty("status"); - expect(completedResponse.body.status).toBe("failed"); - expect(completedResponse.body).toHaveProperty("data"); - expect(completedResponse.body.data).toBeNull(); - expect(completedResponse.body).toHaveProperty("partial_data"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("content"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("markdown"); - expect(completedResponse.body.partial_data[0]).toHaveProperty("metadata"); - expect(completedResponse.body.partial_data[0].metadata.pageStatusCode).toBe(200); - expect(completedResponse.body.partial_data[0].metadata.pageError).toBeUndefined(); - }, 60000); // 60 seconds + expect(completedResponse.statusCode).toBe(200); + expect(completedResponse.body).toHaveProperty("status"); + expect(completedResponse.body.status).toBe("failed"); + expect(completedResponse.body).toHaveProperty("data"); + + let isNullOrEmptyArray = false; + if ( + completedResponse.body.data === null || + completedResponse.body.data.length === 0 + ) { + isNullOrEmptyArray = true; + } + expect(isNullOrEmptyArray).toBe(true); + expect(completedResponse.body.data).toEqual(expect.arrayContaining([])); + expect(completedResponse.body).toHaveProperty("partial_data"); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "content" + ); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "markdown" + ); + expect(completedResponse.body.partial_data[0]).toHaveProperty( + "metadata" + ); + expect( + completedResponse.body.partial_data[0].metadata.pageStatusCode + ).toBe(200); + expect( + completedResponse.body.partial_data[0].metadata.pageError + ).toBeUndefined(); + }, + 60000 + ); // 60 seconds }); describe("POST /v0/scrape with LLM Extraction", () => { - it.concurrent("should extract data using LLM extraction mode", async () => { - const response = await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://mendable.ai", - pageOptions: { - onlyMainContent: true, - }, - extractorOptions: { - mode: "llm-extraction", - extractionPrompt: - "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", - extractionSchema: { - type: "object", - properties: { - company_mission: { - type: "string", - }, - supports_sso: { - type: "boolean", - }, - is_open_source: { - type: "boolean", - }, - }, - required: ["company_mission", "supports_sso", "is_open_source"], + it.concurrent( + 
"should extract data using LLM extraction mode", + async () => { + const response = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ + url: "https://mendable.ai", + pageOptions: { + onlyMainContent: true, }, - }, - }); + extractorOptions: { + mode: "llm-extraction", + extractionPrompt: + "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", + extractionSchema: { + type: "object", + properties: { + company_mission: { + type: "string", + }, + supports_sso: { + type: "boolean", + }, + is_open_source: { + type: "boolean", + }, + }, + required: ["company_mission", "supports_sso", "is_open_source"], + }, + }, + }); - // Ensure that the job was successfully created before proceeding with LLM extraction - expect(response.statusCode).toBe(200); + // Ensure that the job was successfully created before proceeding with LLM extraction + expect(response.statusCode).toBe(200); - // Assuming the LLM extraction object is available in the response body under `data.llm_extraction` - let llmExtraction = response.body.data.llm_extraction; + // Assuming the LLM extraction object is available in the response body under `data.llm_extraction` + let llmExtraction = response.body.data.llm_extraction; - // Check if the llm_extraction object has the required properties with correct types and values - expect(llmExtraction).toHaveProperty("company_mission"); - expect(typeof llmExtraction.company_mission).toBe("string"); - expect(llmExtraction).toHaveProperty("supports_sso"); - expect(llmExtraction.supports_sso).toBe(true); - expect(typeof llmExtraction.supports_sso).toBe("boolean"); - expect(llmExtraction).toHaveProperty("is_open_source"); - expect(llmExtraction.is_open_source).toBe(false); - expect(typeof llmExtraction.is_open_source).toBe("boolean"); - }, 60000); // 60 secs - }); - - describe("POST /v0/crawl with fast mode", () => { - it.concurrent("should complete the crawl under 20 seconds", async () => { - const startTime = Date.now(); - - const crawlResponse = await request(TEST_URL) - .post("/v0/crawl") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ - url: "https://flutterbricks.com", - crawlerOptions: { - mode: "fast" - } - }); - - expect(crawlResponse.statusCode).toBe(200); - - const jobId = crawlResponse.body.jobId; - let statusResponse; - let isFinished = false; - - while (!isFinished) { - statusResponse = await request(TEST_URL) - .get(`/v0/crawl/status/${jobId}`) - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); - - expect(statusResponse.statusCode).toBe(200); - isFinished = statusResponse.body.status === "completed"; - - if (!isFinished) { - await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again - } - } - - // const endTime = Date.now(); - // const timeElapsed = (endTime - startTime) / 1000; // Convert to seconds - - // console.log(`Time elapsed: ${timeElapsed} seconds`); - - expect(statusResponse.body.status).toBe("completed"); - expect(statusResponse.body).toHaveProperty("data"); - expect(statusResponse.body.data[0]).toHaveProperty("content"); - expect(statusResponse.body.data[0]).toHaveProperty("markdown"); - expect(statusResponse.body.data[0]).toHaveProperty("metadata"); - expect(statusResponse.body.data[0].metadata.pageStatusCode).toBe(200); - 
expect(statusResponse.body.data[0].metadata.pageError).toBeUndefined(); - - const results = statusResponse.body.data; - // results.forEach((result, i) => { - // console.log(result.metadata.sourceURL); - // }); - expect(results.length).toBeGreaterThanOrEqual(10); - expect(results.length).toBeLessThanOrEqual(15); - - }, 20000); + // Check if the llm_extraction object has the required properties with correct types and values + expect(llmExtraction).toHaveProperty("company_mission"); + expect(typeof llmExtraction.company_mission).toBe("string"); + expect(llmExtraction).toHaveProperty("supports_sso"); + expect(llmExtraction.supports_sso).toBe(true); + expect(typeof llmExtraction.supports_sso).toBe("boolean"); + expect(llmExtraction).toHaveProperty("is_open_source"); + expect(llmExtraction.is_open_source).toBe(false); + expect(typeof llmExtraction.is_open_source).toBe("boolean"); + }, + 60000 + ); // 60 secs }); }); diff --git a/apps/api/src/controllers/__tests__/crawl.test.ts b/apps/api/src/controllers/__tests__/crawl.test.ts index 621c7436..e65523cb 100644 --- a/apps/api/src/controllers/__tests__/crawl.test.ts +++ b/apps/api/src/controllers/__tests__/crawl.test.ts @@ -1,4 +1,4 @@ -import { crawlController } from '../crawl' +import { crawlController } from '../v0/crawl' import { Request, Response } from 'express'; import { authenticateUser } from '../auth'; // Ensure this import is correct import { createIdempotencyKey } from '../../services/idempotency/create'; diff --git a/apps/api/src/controllers/admin/queue.ts b/apps/api/src/controllers/admin/queue.ts deleted file mode 100644 index cb5f99ed..00000000 --- a/apps/api/src/controllers/admin/queue.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { Request, Response } from "express"; - -import { Job } from "bull"; -import { Logger } from "../../lib/logger"; -import { getWebScraperQueue } from "../../services/queue-service"; -import { checkAlerts } from "../../services/alerts"; - -export async function cleanBefore24hCompleteJobsController( - req: Request, - res: Response -) { - Logger.info("🐂 Cleaning jobs older than 24h"); - try { - const webScraperQueue = getWebScraperQueue(); - const batchSize = 10; - const numberOfBatches = 9; // Adjust based on your needs - const completedJobsPromises: Promise[] = []; - for (let i = 0; i < numberOfBatches; i++) { - completedJobsPromises.push( - webScraperQueue.getJobs( - ["completed"], - i * batchSize, - i * batchSize + batchSize, - true - ) - ); - } - const completedJobs: Job[] = ( - await Promise.all(completedJobsPromises) - ).flat(); - const before24hJobs = - completedJobs.filter( - (job) => job.finishedOn < Date.now() - 24 * 60 * 60 * 1000 - ) || []; - - let count = 0; - - if (!before24hJobs) { - return res.status(200).send(`No jobs to remove.`); - } - - for (const job of before24hJobs) { - try { - await job.remove(); - count++; - } catch (jobError) { - Logger.error(`🐂 Failed to remove job with ID ${job.id}: ${jobError}`); - } - } - return res.status(200).send(`Removed ${count} completed jobs.`); - } catch (error) { - Logger.error(`🐂 Failed to clean last 24h complete jobs: ${error}`); - return res.status(500).send("Failed to clean jobs"); - } -} - - -export async function checkQueuesController(req: Request, res: Response) { - try { - await checkAlerts(); - return res.status(200).send("Alerts initialized"); - } catch (error) { - Logger.debug(`Failed to initialize alerts: ${error}`); - return res.status(500).send("Failed to initialize alerts"); - } - } - - // Use this as a "health check" that way we dont destroy 
the server -export async function queuesController(req: Request, res: Response) { - try { - const webScraperQueue = getWebScraperQueue(); - - const [webScraperActive] = await Promise.all([ - webScraperQueue.getActiveCount(), - ]); - - const noActiveJobs = webScraperActive === 0; - // 200 if no active jobs, 503 if there are active jobs - return res.status(noActiveJobs ? 200 : 500).json({ - webScraperActive, - noActiveJobs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } \ No newline at end of file diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts index 5dff80b8..d634b9ed 100644 --- a/apps/api/src/controllers/auth.ts +++ b/apps/api/src/controllers/auth.ts @@ -1,26 +1,97 @@ -import { parseApi } from "../../src/lib/parseApi"; -import { getRateLimiter, } from "../../src/services/rate-limiter"; -import { AuthResponse, NotificationType, RateLimiterMode } from "../../src/types"; -import { supabase_service } from "../../src/services/supabase"; -import { withAuth } from "../../src/lib/withAuth"; +import { parseApi } from "../lib/parseApi"; +import { getRateLimiter } from "../services/rate-limiter"; +import { + AuthResponse, + NotificationType, + PlanType, + RateLimiterMode, +} from "../types"; +import { supabase_service } from "../services/supabase"; +import { withAuth } from "../lib/withAuth"; import { RateLimiterRedis } from "rate-limiter-flexible"; -import { setTraceAttributes } from '@hyperdx/node-opentelemetry'; +import { setTraceAttributes } from "@hyperdx/node-opentelemetry"; import { sendNotification } from "../services/notification/email_notification"; import { Logger } from "../lib/logger"; - -export async function authenticateUser(req, res, mode?: RateLimiterMode): Promise { +import { redlock } from "../services/redlock"; +import { getValue } from "../services/redis"; +import { setValue } from "../services/redis"; +import { validate } from "uuid"; +import * as Sentry from "@sentry/node"; +// const { data, error } = await supabase_service +// .from('api_keys') +// .select(` +// key, +// team_id, +// teams ( +// subscriptions ( +// price_id +// ) +// ) +// `) +// .eq('key', normalizedApi) +// .limit(1) +// .single(); +function normalizedApiIsUuid(potentialUuid: string): boolean { + // Check if the string is a valid UUID + return validate(potentialUuid); +} +export async function authenticateUser( + req, + res, + mode?: RateLimiterMode +): Promise { return withAuth(supaAuthenticateUser)(req, res, mode); } function setTrace(team_id: string, api_key: string) { try { setTraceAttributes({ team_id, - api_key + api_key, }); } catch (error) { + Sentry.captureException(error); Logger.error(`Error setting trace attributes: ${error.message}`); } +} +async function getKeyAndPriceId(normalizedApi: string): Promise<{ + success: boolean; + teamId?: string; + priceId?: string; + error?: string; + status?: number; +}> { + const { data, error } = await supabase_service.rpc("get_key_and_price_id_2", { + api_key: normalizedApi, + }); + if (error) { + Sentry.captureException(error); + Logger.error(`RPC ERROR (get_key_and_price_id_2): ${error.message}`); + return { + success: false, + error: + "The server seems overloaded. 
Please contact hello@firecrawl.com if you aren't sending too many requests at once.", + status: 500, + }; + } + if (!data || data.length === 0) { + if (error) { + Logger.warn(`Error fetching api key: ${error.message} or data is empty`); + Sentry.captureException(error); + } + // TODO: change this error code ? + return { + success: false, + error: "Unauthorized: Invalid token", + status: 401, + }; + } else { + return { + success: true, + teamId: data[0].team_id, + priceId: data[0].price_id, + }; + } } export async function supaAuthenticateUser( req, @@ -31,9 +102,10 @@ export async function supaAuthenticateUser( team_id?: string; error?: string; status?: number; - plan?: string; + plan?: PlanType; }> { - const authHeader = req.headers.authorization; + + const authHeader = req.headers.authorization ?? (req.headers["sec-websocket-protocol"] ? `Bearer ${req.headers["sec-websocket-protocol"]}` : null); if (!authHeader) { return { success: false, error: "Unauthorized", status: 401 }; } @@ -51,20 +123,88 @@ export async function supaAuthenticateUser( const iptoken = incomingIP + token; let rateLimiter: RateLimiterRedis; - let subscriptionData: { team_id: string, plan: string } | null = null; + let subscriptionData: { team_id: string; plan: string } | null = null; let normalizedApi: string; - let team_id: string; + let cacheKey = ""; + let redLockKey = ""; + const lockTTL = 15000; // 10 seconds + let teamId: string | null = null; + let priceId: string | null = null; if (token == "this_is_just_a_preview_token") { - rateLimiter = getRateLimiter(RateLimiterMode.Preview, token); - team_id = "preview"; + if (mode == RateLimiterMode.CrawlStatus) { + rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token); + } else { + rateLimiter = getRateLimiter(RateLimiterMode.Preview, token); + } + teamId = "preview"; } else { normalizedApi = parseApi(token); + if (!normalizedApiIsUuid(normalizedApi)) { + return { + success: false, + error: "Unauthorized: Invalid token", + status: 401, + }; + } + + cacheKey = `api_key:${normalizedApi}`; + + try { + const teamIdPriceId = await getValue(cacheKey); + if (teamIdPriceId) { + const { team_id, price_id } = JSON.parse(teamIdPriceId); + teamId = team_id; + priceId = price_id; + } else { + const { + success, + teamId: tId, + priceId: pId, + error, + status, + } = await getKeyAndPriceId(normalizedApi); + if (!success) { + return { success, error, status }; + } + teamId = tId; + priceId = pId; + await setValue( + cacheKey, + JSON.stringify({ team_id: teamId, price_id: priceId }), + 60 + ); + } + } catch (error) { + Sentry.captureException(error); + Logger.error(`Error with auth function: ${error}`); + // const { + // success, + // teamId: tId, + // priceId: pId, + // error: e, + // status, + // } = await getKeyAndPriceId(normalizedApi); + // if (!success) { + // return { success, error: e, status }; + // } + // teamId = tId; + // priceId = pId; + // const { + // success, + // teamId: tId, + // priceId: pId, + // error: e, + // status, + // } = await getKeyAndPriceId(normalizedApi); + // if (!success) { + // return { success, error: e, status }; + // } + // teamId = tId; + // priceId = pId; + } - const { data, error } = await supabase_service.rpc( - 'get_key_and_price_id_2', { api_key: normalizedApi } - ); // get_key_and_price_id_2 rpc definition: // create or replace function get_key_and_price_id_2(api_key uuid) // returns table(key uuid, team_id uuid, price_id text) as $$ @@ -82,46 +222,47 @@ export async function supaAuthenticateUser( // end; // $$ language plpgsql; - 
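// The lookup used by the new auth path follows a cache-aside pattern. A condensed sketch
// (illustrative, not part of the change): check Redis first via getValue/setValue from
// ../services/redis, fall back to the get_key_and_price_id_2 RPC wrapper, and cache the
// result for 60 seconds so repeated requests skip the database round trip.
async function resolveTeamAndPrice(normalizedApi: string) {
  const cacheKey = `api_key:${normalizedApi}`;
  const cached = await getValue(cacheKey);
  if (cached) {
    const { team_id, price_id } = JSON.parse(cached);
    return { teamId: team_id, priceId: price_id };
  }
  const { success, teamId, priceId, error } = await getKeyAndPriceId(normalizedApi);
  if (!success) {
    throw new Error(error);
  }
  await setValue(
    cacheKey,
    JSON.stringify({ team_id: teamId, price_id: priceId }),
    60 // seconds
  );
  return { teamId, priceId };
}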
if (error) { - Logger.warn(`Error fetching key and price_id: ${error.message}`); - } else { - // console.log('Key and Price ID:', data); - } - - - - if (error || !data || data.length === 0) { - Logger.warn(`Error fetching api key: ${error.message} or data is empty`); - return { - success: false, - error: "Unauthorized: Invalid token", - status: 401, - }; - } - const internal_team_id = data[0].team_id; - team_id = internal_team_id; - - const plan = getPlanByPriceId(data[0].price_id); + const plan = getPlanByPriceId(priceId); // HyperDX Logging - setTrace(team_id, normalizedApi); + setTrace(teamId, normalizedApi); subscriptionData = { - team_id: team_id, - plan: plan - } + team_id: teamId, + plan: plan, + }; switch (mode) { case RateLimiterMode.Crawl: - rateLimiter = getRateLimiter(RateLimiterMode.Crawl, token, subscriptionData.plan); + rateLimiter = getRateLimiter( + RateLimiterMode.Crawl, + token, + subscriptionData.plan + ); break; case RateLimiterMode.Scrape: - rateLimiter = getRateLimiter(RateLimiterMode.Scrape, token, subscriptionData.plan); + rateLimiter = getRateLimiter( + RateLimiterMode.Scrape, + token, + subscriptionData.plan, + teamId + ); break; case RateLimiterMode.Search: - rateLimiter = getRateLimiter(RateLimiterMode.Search, token, subscriptionData.plan); + rateLimiter = getRateLimiter( + RateLimiterMode.Search, + token, + subscriptionData.plan + ); + break; + case RateLimiterMode.Map: + rateLimiter = getRateLimiter( + RateLimiterMode.Map, + token, + subscriptionData.plan + ); break; case RateLimiterMode.CrawlStatus: rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token); break; - + case RateLimiterMode.Preview: rateLimiter = getRateLimiter(RateLimiterMode.Preview, token); break; @@ -134,7 +275,8 @@ export async function supaAuthenticateUser( } } - const team_endpoint_token = token === "this_is_just_a_preview_token" ? iptoken : team_id; + const team_endpoint_token = + token === "this_is_just_a_preview_token" ? iptoken : teamId; try { await rateLimiter.consume(team_endpoint_token); @@ -147,17 +289,32 @@ export async function supaAuthenticateUser( const startDate = new Date(); const endDate = new Date(); endDate.setDate(endDate.getDate() + 7); + // await sendNotification(team_id, NotificationType.RATE_LIMIT_REACHED, startDate.toISOString(), endDate.toISOString()); + // Cache longer for 429s + if (teamId && priceId && mode !== RateLimiterMode.Preview) { + await setValue( + cacheKey, + JSON.stringify({ team_id: teamId, price_id: priceId }), + 60 // 10 seconds, cache for everything + ); + } + return { success: false, - error: `Rate limit exceeded. Consumed points: ${rateLimiterRes.consumedPoints}, Remaining points: ${rateLimiterRes.remainingPoints}. Upgrade your plan at https://firecrawl.dev/pricing for increased rate limits or please retry after ${secs}s, resets at ${retryDate}`, + error: `Rate limit exceeded. Consumed (req/min): ${rateLimiterRes.consumedPoints}, Remaining (req/min): ${rateLimiterRes.remainingPoints}. 
Upgrade your plan at https://firecrawl.dev/pricing for increased rate limits or please retry after ${secs}s, resets at ${retryDate}`, status: 429, }; } if ( token === "this_is_just_a_preview_token" && - (mode === RateLimiterMode.Scrape || mode === RateLimiterMode.Preview || mode === RateLimiterMode.Search) + (mode === RateLimiterMode.Scrape || + mode === RateLimiterMode.Preview || + mode === RateLimiterMode.Map || + mode === RateLimiterMode.Crawl || + mode === RateLimiterMode.CrawlStatus || + mode === RateLimiterMode.Search) ) { return { success: true, team_id: "preview" }; // check the origin of the request and make sure its from firecrawl.dev @@ -181,10 +338,11 @@ export async function supaAuthenticateUser( .select("*") .eq("key", normalizedApi); - - if (error || !data || data.length === 0) { - Logger.warn(`Error fetching api key: ${error.message} or data is empty`); + if (error) { + Sentry.captureException(error); + Logger.warn(`Error fetching api key: ${error.message} or data is empty`); + } return { success: false, error: "Unauthorized: Invalid token", @@ -195,26 +353,32 @@ export async function supaAuthenticateUser( subscriptionData = data[0]; } - return { success: true, team_id: subscriptionData.team_id, plan: subscriptionData.plan ?? ""}; + return { + success: true, + team_id: subscriptionData.team_id, + plan: (subscriptionData.plan ?? "") as PlanType, + }; } -function getPlanByPriceId(price_id: string) { +function getPlanByPriceId(price_id: string): PlanType { switch (price_id) { case process.env.STRIPE_PRICE_ID_STARTER: - return 'starter'; + return "starter"; case process.env.STRIPE_PRICE_ID_STANDARD: - return 'standard'; + return "standard"; case process.env.STRIPE_PRICE_ID_SCALE: - return 'scale'; + return "scale"; case process.env.STRIPE_PRICE_ID_HOBBY: case process.env.STRIPE_PRICE_ID_HOBBY_YEARLY: - return 'hobby'; + return "hobby"; case process.env.STRIPE_PRICE_ID_STANDARD_NEW: case process.env.STRIPE_PRICE_ID_STANDARD_NEW_YEARLY: - return 'standardnew'; + return "standardnew"; case process.env.STRIPE_PRICE_ID_GROWTH: case process.env.STRIPE_PRICE_ID_GROWTH_YEARLY: - return 'growth'; + return "growth"; + case process.env.STRIPE_PRICE_ID_GROWTH_DOUBLE_MONTHLY: + return "growthdouble"; default: - return 'free'; + return "free"; } -} \ No newline at end of file +} diff --git a/apps/api/src/controllers/crawl-status.ts b/apps/api/src/controllers/crawl-status.ts deleted file mode 100644 index 82a49db8..00000000 --- a/apps/api/src/controllers/crawl-status.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlStatusController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.CrawlStatus - ); - if (!success) { - return res.status(status).json({ error }); - } - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { - return res.status(404).json({ error: "Job not found" }); - } - - const { current, current_url, total, current_step, partialDocs } = await job.progress(); - - let data = job.returnvalue; - const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 
'true'; - if (useDbAuthentication) { - const supabaseData = await supabaseGetJobById(req.params.jobId); - - if (supabaseData) { - data = supabaseData.docs; - } - } - - const jobStatus = await job.getState(); - - res.json({ - status: jobStatus, - // progress: job.progress(), - current, - current_url, - current_step, - total, - data: data ? data : null, - partial_data: jobStatus == 'completed' ? [] : partialDocs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts deleted file mode 100644 index 9480c63b..00000000 --- a/apps/api/src/controllers/crawl.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../../src/scraper/WebScraper"; -import { billTeam } from "../../src/services/billing/credit_billing"; -import { checkTeamCredits } from "../../src/services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../src/scraper/WebScraper/utils/blocklist"; -import { logCrawl } from "../../src/services/logging/crawl_log"; -import { validateIdempotencyKey } from "../../src/services/idempotency/validate"; -import { createIdempotencyKey } from "../../src/services/idempotency/create"; -import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../src/lib/default-values"; -import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Crawl - ); - if (!success) { - return res.status(status).json({ error }); - } - - if (req.headers["x-idempotency-key"]) { - const isIdempotencyValid = await validateIdempotencyKey(req); - if (!isIdempotencyValid) { - return res.status(409).json({ error: "Idempotency key already used" }); - } - try { - createIdempotencyKey(req); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } - - const { success: creditsCheckSuccess, message: creditsCheckMessage } = - await checkTeamCredits(team_id, 1); - if (!creditsCheckSuccess) { - return res.status(402).json({ error: "Insufficient credits" }); - } - - const url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res - .status(403) - .json({ - error: - "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", - }); - } - - const mode = req.body.mode ?? "crawl"; - - const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions }; - const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; - - if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? 
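// The idempotency check used by the crawl controllers, pulled out into a helper for
// illustration (a sketch built from the names in this diff, not part of the change):
// a reused x-idempotency-key is rejected with 409, otherwise the key is recorded and
// the request proceeds. Import paths assume the helper sits next to the controllers.
import { Request, Response } from "express";
import { validateIdempotencyKey } from "../../services/idempotency/validate";
import { createIdempotencyKey } from "../../services/idempotency/create";

async function guardIdempotency(req: Request, res: Response): Promise<boolean> {
  if (!req.headers["x-idempotency-key"]) {
    return true; // nothing to enforce
  }
  const isIdempotencyValid = await validateIdempotencyKey(req);
  if (!isIdempotencyValid) {
    res.status(409).json({ error: "Idempotency key already used" });
    return false;
  }
  await createIdempotencyKey(req);
  return true;
}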
- try { - const a = new WebScraperDataProvider(); - await a.setOptions({ - jobId: uuidv4(), - mode: "single_urls", - urls: [url], - crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, - pageOptions: pageOptions, - }); - - const docs = await a.getDocuments(false, (progress) => { - job.progress({ - current: progress.current, - total: progress.total, - current_step: "SCRAPING", - current_url: progress.currentDocumentUrl, - }); - }); - return res.json({ - success: true, - documents: docs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } - } - - const job = await addWebScraperJob({ - url: url, - mode: mode ?? "crawl", // fix for single urls not working - crawlerOptions: crawlerOptions, - team_id: team_id, - pageOptions: pageOptions, - origin: req.body.origin ?? defaultOrigin, - }); - - await logCrawl(job.id.toString(), team_id); - - res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/crawlPreview.ts b/apps/api/src/controllers/crawlPreview.ts deleted file mode 100644 index 7c5c804d..00000000 --- a/apps/api/src/controllers/crawlPreview.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { isUrlBlocked } from "../../src/scraper/WebScraper/utils/blocklist"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlPreviewController(req: Request, res: Response) { - try { - const { success, team_id, error, status } = await authenticateUser( - req, - res, - RateLimiterMode.Preview - ); - if (!success) { - return res.status(status).json({ error }); - } - // authenticate on supabase - const url = req.body.url; - if (!url) { - return res.status(400).json({ error: "Url is required" }); - } - - if (isUrlBlocked(url)) { - return res.status(403).json({ error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." }); - } - - const mode = req.body.mode ?? "crawl"; - const crawlerOptions = req.body.crawlerOptions ?? {}; - const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false, removeTags: [] }; - - const job = await addWebScraperJob({ - url: url, - mode: mode ?? 
"crawl", // fix for single urls not working - crawlerOptions: { ...crawlerOptions, limit: 5, maxCrawledLinks: 5 }, - team_id: "preview", - pageOptions: pageOptions, - origin: "website-preview", - }); - - res.json({ jobId: job.id }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/scrape.ts b/apps/api/src/controllers/scrape.ts deleted file mode 100644 index 6c94f4c0..00000000 --- a/apps/api/src/controllers/scrape.ts +++ /dev/null @@ -1,196 +0,0 @@ -import { ExtractorOptions, PageOptions } from './../lib/entities'; -import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../scraper/WebScraper"; -import { billTeam, checkTeamCredits } from "../services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../types"; -import { logJob } from "../services/logging/log_job"; -import { Document } from "../lib/entities"; -import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function -import { numTokensFromString } from '../lib/LLM-extraction/helpers'; -import { defaultPageOptions, defaultExtractorOptions, defaultTimeout, defaultOrigin } from '../lib/default-values'; -import { v4 as uuidv4 } from "uuid"; -import { Logger } from '../lib/logger'; - -export async function scrapeHelper( - jobId: string, - req: Request, - team_id: string, - crawlerOptions: any, - pageOptions: PageOptions, - extractorOptions: ExtractorOptions, - timeout: number, - plan?: string -): Promise<{ - success: boolean; - error?: string; - data?: Document; - returnCode: number; -}> { - const url = req.body.url; - if (!url) { - return { success: false, error: "Url is required", returnCode: 400 }; - } - - if (isUrlBlocked(url)) { - return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 }; - } - - const a = new WebScraperDataProvider(); - await a.setOptions({ - jobId, - mode: "single_urls", - urls: [url], - crawlerOptions: { - ...crawlerOptions, - }, - pageOptions: pageOptions, - extractorOptions: extractorOptions, - }); - - const timeoutPromise = new Promise<{ success: boolean; error?: string; returnCode: number }>((_, reject) => - setTimeout(() => reject({ success: false, error: "Request timed out. 
Increase the timeout by passing `timeout` param to the request.", returnCode: 408 }), timeout) - ); - - const docsPromise = a.getDocuments(false); - - let docs; - try { - docs = await Promise.race([docsPromise, timeoutPromise]); - } catch (error) { - return error; - } - - // make sure doc.content is not empty - let filteredDocs = docs.filter( - (doc: { content?: string }) => doc.content && doc.content.trim().length > 0 - ); - if (filteredDocs.length === 0) { - return { success: true, error: "No page found", returnCode: 200, data: docs[0] }; - } - - - // Remove rawHtml if pageOptions.rawHtml is false and extractorOptions.mode is llm-extraction-from-raw-html - if (!pageOptions.includeRawHtml && extractorOptions.mode == "llm-extraction-from-raw-html") { - filteredDocs.forEach(doc => { - delete doc.rawHtml; - }); - } - - return { - success: true, - data: filteredDocs[0], - returnCode: 200, - }; -} - -export async function scrapeController(req: Request, res: Response) { - try { - let earlyReturn = false; - // make sure to authenticate user first, Bearer - const { success, team_id, error, status, plan } = await authenticateUser( - req, - res, - RateLimiterMode.Scrape - ); - if (!success) { - return res.status(status).json({ error }); - } - - const crawlerOptions = req.body.crawlerOptions ?? {}; - const pageOptions = { ...defaultPageOptions, ...req.body.pageOptions }; - const extractorOptions = { ...defaultExtractorOptions, ...req.body.extractorOptions }; - const origin = req.body.origin ?? defaultOrigin; - let timeout = req.body.timeout ?? defaultTimeout; - - if (extractorOptions.mode.includes("llm-extraction")) { - pageOptions.onlyMainContent = true; - timeout = req.body.timeout ?? 90000; - } - - const checkCredits = async () => { - try { - const { success: creditsCheckSuccess, message: creditsCheckMessage } = await checkTeamCredits(team_id, 1); - if (!creditsCheckSuccess) { - earlyReturn = true; - return res.status(402).json({ error: "Insufficient credits" }); - } - } catch (error) { - Logger.error(error); - earlyReturn = true; - return res.status(500).json({ error: "Error checking team credits. Please contact hello@firecrawl.com for help." }); - } - }; - - - await checkCredits(); - - const jobId = uuidv4(); - - const startTime = new Date().getTime(); - const result = await scrapeHelper( - jobId, - req, - team_id, - crawlerOptions, - pageOptions, - extractorOptions, - timeout, - plan - ); - const endTime = new Date().getTime(); - const timeTakenInSeconds = (endTime - startTime) / 1000; - const numTokens = (result.data && result.data.markdown) ? numTokensFromString(result.data.markdown, "gpt-3.5-turbo") : 0; - - if (result.success) { - let creditsToBeBilled = 1; // Assuming 1 credit per document - const creditsPerLLMExtract = 50; - - if (extractorOptions.mode.includes("llm-extraction")) { - // creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length); - creditsToBeBilled += creditsPerLLMExtract; - } - - let startTimeBilling = new Date().getTime(); - - if (earlyReturn) { - // Don't bill if we're early returning - return; - } - const billingResult = await billTeam( - team_id, - creditsToBeBilled - ); - if (!billingResult.success) { - return res.status(402).json({ - success: false, - error: "Failed to bill team. 
Insufficient credits or subscription not found.", - }); - } - } - - logJob({ - job_id: jobId, - success: result.success, - message: result.error, - num_docs: 1, - docs: [result.data], - time_taken: timeTakenInSeconds, - team_id: team_id, - mode: "scrape", - url: req.body.url, - crawlerOptions: crawlerOptions, - pageOptions: pageOptions, - origin: origin, - extractor_options: extractorOptions, - num_tokens: numTokens, - }); - - - - return res.status(result.returnCode).json(result); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/status.ts b/apps/api/src/controllers/status.ts deleted file mode 100644 index 935338bd..00000000 --- a/apps/api/src/controllers/status.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { Request, Response } from "express"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; -import { Logger } from "../../src/lib/logger"; - -export async function crawlJobStatusPreviewController(req: Request, res: Response) { - try { - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { - return res.status(404).json({ error: "Job not found" }); - } - - const { current, current_url, total, current_step, partialDocs } = await job.progress(); - let data = job.returnvalue; - const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; - if (useDbAuthentication) { - const supabaseData = await supabaseGetJobById(req.params.jobId); - - if (supabaseData) { - data = supabaseData.docs; - } - } - - let jobStatus = await job.getState(); - if (jobStatus === 'waiting' || jobStatus === 'stuck') { - jobStatus = 'active'; - } - - res.json({ - status: jobStatus, - // progress: job.progress(), - current, - current_url, - current_step, - total, - data: data ? data : null, - partial_data: jobStatus == 'completed' ? 
[] : partialDocs, - }); - } catch (error) { - Logger.error(error); - return res.status(500).json({ error: error.message }); - } -} diff --git a/apps/api/src/controllers/v0/admin/queue.ts b/apps/api/src/controllers/v0/admin/queue.ts new file mode 100644 index 00000000..71748002 --- /dev/null +++ b/apps/api/src/controllers/v0/admin/queue.ts @@ -0,0 +1,199 @@ +import { Request, Response } from "express"; + +import { Job } from "bullmq"; +import { Logger } from "../../../lib/logger"; +import { getScrapeQueue } from "../../../services/queue-service"; +import { checkAlerts } from "../../../services/alerts"; +import { sendSlackWebhook } from "../../../services/alerts/slack"; + +export async function cleanBefore24hCompleteJobsController( + req: Request, + res: Response +) { + Logger.info("🐂 Cleaning jobs older than 24h"); + try { + const scrapeQueue = getScrapeQueue(); + const batchSize = 10; + const numberOfBatches = 9; // Adjust based on your needs + const completedJobsPromises: Promise[] = []; + for (let i = 0; i < numberOfBatches; i++) { + completedJobsPromises.push( + scrapeQueue.getJobs( + ["completed"], + i * batchSize, + i * batchSize + batchSize, + true + ) + ); + } + const completedJobs: Job[] = ( + await Promise.all(completedJobsPromises) + ).flat(); + const before24hJobs = + completedJobs.filter( + (job) => job.finishedOn < Date.now() - 24 * 60 * 60 * 1000 + ) || []; + + let count = 0; + + if (!before24hJobs) { + return res.status(200).send(`No jobs to remove.`); + } + + for (const job of before24hJobs) { + try { + await job.remove(); + count++; + } catch (jobError) { + Logger.error(`🐂 Failed to remove job with ID ${job.id}: ${jobError}`); + } + } + return res.status(200).send(`Removed ${count} completed jobs.`); + } catch (error) { + Logger.error(`🐂 Failed to clean last 24h complete jobs: ${error}`); + return res.status(500).send("Failed to clean jobs"); + } +} + +export async function checkQueuesController(req: Request, res: Response) { + try { + await checkAlerts(); + return res.status(200).send("Alerts initialized"); + } catch (error) { + Logger.debug(`Failed to initialize alerts: ${error}`); + return res.status(500).send("Failed to initialize alerts"); + } +} + +// Use this as a "health check" that way we dont destroy the server +export async function queuesController(req: Request, res: Response) { + try { + const scrapeQueue = getScrapeQueue(); + + const [webScraperActive] = await Promise.all([ + scrapeQueue.getActiveCount(), + ]); + + const noActiveJobs = webScraperActive === 0; + // 200 if no active jobs, 503 if there are active jobs + return res.status(noActiveJobs ? 
200 : 500).json({ + webScraperActive, + noActiveJobs, + }); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} + +export async function autoscalerController(req: Request, res: Response) { + try { + const maxNumberOfMachines = 80; + const minNumberOfMachines = 20; + + const scrapeQueue = getScrapeQueue(); + + const [webScraperActive, webScraperWaiting, webScraperPriority] = + await Promise.all([ + scrapeQueue.getActiveCount(), + scrapeQueue.getWaitingCount(), + scrapeQueue.getPrioritizedCount(), + ]); + + let waitingAndPriorityCount = webScraperWaiting + webScraperPriority; + + // get number of machines active + const request = await fetch( + "https://api.machines.dev/v1/apps/firecrawl-scraper-js/machines", + { + headers: { + Authorization: `Bearer ${process.env.FLY_API_TOKEN}`, + }, + } + ); + const machines = await request.json(); + + // Only worker machines + const activeMachines = machines.filter( + (machine) => + (machine.state === "started" || + machine.state === "starting" || + machine.state === "replacing") && + machine.config.env["FLY_PROCESS_GROUP"] === "worker" + ).length; + + let targetMachineCount = activeMachines; + + const baseScaleUp = 10; + // Slow scale down + const baseScaleDown = 2; + + // Scale up logic + if (webScraperActive > 9000 || waitingAndPriorityCount > 2000) { + targetMachineCount = Math.min( + maxNumberOfMachines, + activeMachines + baseScaleUp * 3 + ); + } else if (webScraperActive > 5000 || waitingAndPriorityCount > 1000) { + targetMachineCount = Math.min( + maxNumberOfMachines, + activeMachines + baseScaleUp * 2 + ); + } else if (webScraperActive > 1000 || waitingAndPriorityCount > 500) { + targetMachineCount = Math.min( + maxNumberOfMachines, + activeMachines + baseScaleUp + ); + } + + // Scale down logic + if (webScraperActive < 100 && waitingAndPriorityCount < 50) { + targetMachineCount = Math.max( + minNumberOfMachines, + activeMachines - baseScaleDown * 3 + ); + } else if (webScraperActive < 500 && waitingAndPriorityCount < 200) { + targetMachineCount = Math.max( + minNumberOfMachines, + activeMachines - baseScaleDown * 2 + ); + } else if (webScraperActive < 1000 && waitingAndPriorityCount < 500) { + targetMachineCount = Math.max( + minNumberOfMachines, + activeMachines - baseScaleDown + ); + } + + if (targetMachineCount !== activeMachines) { + Logger.info( + `🐂 Scaling from ${activeMachines} to ${targetMachineCount} - ${webScraperActive} active, ${webScraperWaiting} waiting` + ); + + if (targetMachineCount > activeMachines) { + sendSlackWebhook( + `🐂 Scaling from ${activeMachines} to ${targetMachineCount} - ${webScraperActive} active, ${webScraperWaiting} waiting - Current DateTime: ${new Date().toISOString()}`, + false, + process.env.SLACK_AUTOSCALER ?? "" + ); + } else { + sendSlackWebhook( + `🐂 Scaling from ${activeMachines} to ${targetMachineCount} - ${webScraperActive} active, ${webScraperWaiting} waiting - Current DateTime: ${new Date().toISOString()}`, + false, + process.env.SLACK_AUTOSCALER ?? 
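// The autoscaler thresholds above, folded into one pure function for readability
// (a sketch only; the constants mirror the controller, and its two if-chains never
// both fire for the same counts, so early returns preserve the behaviour).
function computeTargetMachineCount(
  active: number, // webScraperActive
  waitingAndPriority: number, // waiting + prioritized jobs
  currentMachines: number // worker machines currently started/starting/replacing
): number {
  const max = 80, min = 20, scaleUp = 10, scaleDown = 2;
  if (active > 9000 || waitingAndPriority > 2000) return Math.min(max, currentMachines + scaleUp * 3);
  if (active > 5000 || waitingAndPriority > 1000) return Math.min(max, currentMachines + scaleUp * 2);
  if (active > 1000 || waitingAndPriority > 500) return Math.min(max, currentMachines + scaleUp);
  if (active < 100 && waitingAndPriority < 50) return Math.max(min, currentMachines - scaleDown * 3);
  if (active < 500 && waitingAndPriority < 200) return Math.max(min, currentMachines - scaleDown * 2);
  if (active < 1000 && waitingAndPriority < 500) return Math.max(min, currentMachines - scaleDown);
  return currentMachines; // within the dead band: keep the current fleet size
}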
"" + ); + } + return res.status(200).json({ + mode: "scale-descale", + count: targetMachineCount, + }); + } + + return res.status(200).json({ + mode: "normal", + count: activeMachines, + }); + } catch (error) { + Logger.error(error); + return res.status(500).send("Failed to initialize autoscaler"); + } +} diff --git a/apps/api/src/controllers/admin/redis-health.ts b/apps/api/src/controllers/v0/admin/redis-health.ts similarity index 95% rename from apps/api/src/controllers/admin/redis-health.ts rename to apps/api/src/controllers/v0/admin/redis-health.ts index 3b1e2518..dc58d745 100644 --- a/apps/api/src/controllers/admin/redis-health.ts +++ b/apps/api/src/controllers/v0/admin/redis-health.ts @@ -1,7 +1,7 @@ import { Request, Response } from "express"; import Redis from "ioredis"; -import { Logger } from "../../lib/logger"; -import { redisRateLimitClient } from "../../services/rate-limiter"; +import { Logger } from "../../../lib/logger"; +import { redisRateLimitClient } from "../../../services/rate-limiter"; export async function redisHealthController(req: Request, res: Response) { const retryOperation = async (operation, retries = 3) => { diff --git a/apps/api/src/controllers/crawl-cancel.ts b/apps/api/src/controllers/v0/crawl-cancel.ts similarity index 50% rename from apps/api/src/controllers/crawl-cancel.ts rename to apps/api/src/controllers/v0/crawl-cancel.ts index d0c109ec..bf1c2d0a 100644 --- a/apps/api/src/controllers/crawl-cancel.ts +++ b/apps/api/src/controllers/v0/crawl-cancel.ts @@ -1,11 +1,10 @@ import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../../src/types"; -import { addWebScraperJob } from "../../src/services/queue-jobs"; -import { getWebScraperQueue } from "../../src/services/queue-service"; -import { supabase_service } from "../../src/services/supabase"; -import { billTeam } from "../../src/services/billing/credit_billing"; -import { Logger } from "../../src/lib/logger"; +import { authenticateUser } from "../auth"; +import { RateLimiterMode } from "../../../src/types"; +import { supabase_service } from "../../../src/services/supabase"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, saveCrawl } from "../../../src/lib/crawl-redis"; +import * as Sentry from "@sentry/node"; export async function crawlCancelController(req: Request, res: Response) { try { @@ -19,8 +18,9 @@ export async function crawlCancelController(req: Request, res: Response) { if (!success) { return res.status(status).json({ error }); } - const job = await getWebScraperQueue().getJob(req.params.jobId); - if (!job) { + + const sc = await getCrawl(req.params.jobId); + if (!sc) { return res.status(404).json({ error: "Job not found" }); } @@ -40,31 +40,18 @@ export async function crawlCancelController(req: Request, res: Response) { } } - const jobState = await job.getState(); - const { partialDocs } = await job.progress(); - - if (partialDocs && partialDocs.length > 0 && jobState === "active") { - Logger.info("Billing team for partial docs..."); - // Note: the credits that we will bill them here might be lower than the actual - // due to promises that are not yet resolved - await billTeam(team_id, partialDocs.length); - } - try { - await getWebScraperQueue().client.del(job.lockKey()); - await job.takeLock(); - await job.discard(); - await job.moveToFailed(Error("Job cancelled by user"), true); + sc.cancelled = true; + await saveCrawl(req.params.jobId, sc); } catch (error) { Logger.error(error); } - const newJobState 
= await job.getState(); - res.json({ status: "cancelled" }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/v0/crawl-status.ts b/apps/api/src/controllers/v0/crawl-status.ts new file mode 100644 index 00000000..b0649cd0 --- /dev/null +++ b/apps/api/src/controllers/v0/crawl-status.ts @@ -0,0 +1,84 @@ +import { Request, Response } from "express"; +import { authenticateUser } from "../auth"; +import { RateLimiterMode } from "../../../src/types"; +import { getScrapeQueue } from "../../../src/services/queue-service"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis"; +import { supabaseGetJobsById } from "../../../src/lib/supabase-jobs"; +import * as Sentry from "@sentry/node"; + +export async function getJobs(ids: string[]) { + const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobsById(ids); + + supabaseData.forEach(x => { + const job = jobs.find(y => y.id === x.job_id); + if (job) { + job.returnvalue = x.docs; + } + }) + } + + jobs.forEach(job => { + job.returnvalue = Array.isArray(job.returnvalue) ? job.returnvalue[0] : job.returnvalue; + }); + + return jobs; +} + +export async function crawlStatusController(req: Request, res: Response) { + try { + const { success, team_id, error, status } = await authenticateUser( + req, + res, + RateLimiterMode.CrawlStatus + ); + if (!success) { + return res.status(status).json({ error }); + } + + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ error: "Job not found" }); + } + + if (sc.team_id !== team_id) { + return res.status(403).json({ error: "Forbidden" }); + } + + const jobIDs = await getCrawlJobs(req.params.jobId); + + const jobs = (await getJobs(jobIDs)).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map(x => x.getState())); + const jobStatus = sc.cancelled ? "failed" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "active"; + + const data = jobs.map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue); + + if ( + jobs.length > 0 && + jobs[0].data && + jobs[0].data.pageOptions && + !jobs[0].data.pageOptions.includeRawHtml + ) { + data.forEach(item => { + if (item) { + delete item.rawHtml; + } + }); + } + + res.json({ + status: jobStatus, + current: jobStatuses.filter(x => x === "completed" || x === "failed").length, + total: jobs.length, + data: jobStatus === "completed" ? data : null, + partial_data: jobStatus === "completed" ? 
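// How the aggregate crawl status in the new v0 crawl-status controller is derived,
// written out as a small helper (a readability sketch, not part of the diff): a
// cancelled crawl reports "failed", otherwise the per-job BullMQ states are folded
// into completed / failed / active.
function deriveCrawlStatus(
  cancelled: boolean,
  jobStates: string[]
): "completed" | "failed" | "active" {
  if (cancelled) return "failed";
  if (jobStates.every((s) => s === "completed")) return "completed";
  if (jobStates.some((s) => s === "failed")) return "failed";
  return "active";
}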
[] : data.filter(x => x !== null), + }); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v0/crawl.ts b/apps/api/src/controllers/v0/crawl.ts new file mode 100644 index 00000000..aefdb5e5 --- /dev/null +++ b/apps/api/src/controllers/v0/crawl.ts @@ -0,0 +1,232 @@ +import { Request, Response } from "express"; +import { checkTeamCredits } from "../../../src/services/billing/credit_billing"; +import { authenticateUser } from "../auth"; +import { RateLimiterMode } from "../../../src/types"; +import { addScrapeJob } from "../../../src/services/queue-jobs"; +import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; +import { logCrawl } from "../../../src/services/logging/crawl_log"; +import { validateIdempotencyKey } from "../../../src/services/idempotency/validate"; +import { createIdempotencyKey } from "../../../src/services/idempotency/create"; +import { defaultCrawlPageOptions, defaultCrawlerOptions, defaultOrigin } from "../../../src/lib/default-values"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../../src/lib/logger"; +import { addCrawlJob, addCrawlJobs, crawlToCrawler, lockURL, lockURLs, saveCrawl, StoredCrawl } from "../../../src/lib/crawl-redis"; +import { getScrapeQueue } from "../../../src/services/queue-service"; +import { checkAndUpdateURL } from "../../../src/lib/validateUrl"; +import * as Sentry from "@sentry/node"; +import { getJobPriority } from "../../lib/job-priority"; + +export async function crawlController(req: Request, res: Response) { + try { + const { success, team_id, error, status, plan } = await authenticateUser( + req, + res, + RateLimiterMode.Crawl + ); + if (!success) { + return res.status(status).json({ error }); + } + + if (req.headers["x-idempotency-key"]) { + const isIdempotencyValid = await validateIdempotencyKey(req); + if (!isIdempotencyValid) { + return res.status(409).json({ error: "Idempotency key already used" }); + } + try { + createIdempotencyKey(req); + } catch (error) { + Logger.error(error); + return res.status(500).json({ error: error.message }); + } + } + + const crawlerOptions = { + ...defaultCrawlerOptions, + ...req.body.crawlerOptions, + }; + const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions }; + + if (Array.isArray(crawlerOptions.includes)) { + for (const x of crawlerOptions.includes) { + try { + new RegExp(x); + } catch (e) { + return res.status(400).json({ error: e.message }); + } + } + } + + if (Array.isArray(crawlerOptions.excludes)) { + for (const x of crawlerOptions.excludes) { + try { + new RegExp(x); + } catch (e) { + return res.status(400).json({ error: e.message }); + } + } + } + + const limitCheck = req.body?.crawlerOptions?.limit ?? 1; + const { success: creditsCheckSuccess, message: creditsCheckMessage, remainingCredits } = + await checkTeamCredits(team_id, limitCheck); + + if (!creditsCheckSuccess) { + return res.status(402).json({ error: "Insufficient credits. You may be requesting with a higher limit than the amount of credits you have left. 
If not, upgrade your plan at https://firecrawl.dev/pricing or contact us at hello@firecrawl.com" }); + } + + // TODO: need to do this to v1 + crawlerOptions.limit = Math.min(remainingCredits, crawlerOptions.limit); + + let url = req.body.url; + if (!url) { + return res.status(400).json({ error: "Url is required" }); + } + if (typeof url !== "string") { + return res.status(400).json({ error: "URL must be a string" }); + } + try { + url = checkAndUpdateURL(url).url; + } catch (e) { + return res + .status(e instanceof Error && e.message === "Invalid URL" ? 400 : 500) + .json({ error: e.message ?? e }); + } + + if (isUrlBlocked(url)) { + return res.status(403).json({ + error: + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", + }); + } + + // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? + // try { + // const a = new WebScraperDataProvider(); + // await a.setOptions({ + // jobId: uuidv4(), + // mode: "single_urls", + // urls: [url], + // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, + // pageOptions: pageOptions, + // }); + + // const docs = await a.getDocuments(false, (progress) => { + // job.updateProgress({ + // current: progress.current, + // total: progress.total, + // current_step: "SCRAPING", + // current_url: progress.currentDocumentUrl, + // }); + // }); + // return res.json({ + // success: true, + // documents: docs, + // }); + // } catch (error) { + // Logger.error(error); + // return res.status(500).json({ error: error.message }); + // } + // } + + const id = uuidv4(); + + await logCrawl(id, team_id); + + const sc: StoredCrawl = { + originUrl: url, + crawlerOptions, + pageOptions, + team_id, + plan, + createdAt: Date.now(), + }; + + const crawler = crawlToCrawler(id, sc); + + try { + sc.robots = await crawler.getRobotsTxt(); + } catch (_) {} + + await saveCrawl(id, sc); + + const sitemap = sc.crawlerOptions?.ignoreSitemap + ? null + : await crawler.tryGetSitemap(); + + + if (sitemap !== null && sitemap.length > 0) { + let jobPriority = 20; + // If it is over 1000, we need to get the job priority, + // otherwise we can use the default priority of 20 + if(sitemap.length > 1000){ + // set base to 21 + jobPriority = await getJobPriority({plan, team_id, basePriority: 21}) + } + const jobs = sitemap.map((x) => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: req.body.origin ?? defaultOrigin, + crawl_id: id, + sitemapped: true, + }, + opts: { + jobId: uuid, + priority: jobPriority, + }, + }; + }); + + await lockURLs( + id, + jobs.map((x) => x.data.url) + ); + await addCrawlJobs( + id, + jobs.map((x) => x.opts.jobId) + ); + if (Sentry.isInitialized()) { + for (const job of jobs) { + // add with sentry instrumentation + await addScrapeJob(job.data as any, {}, job.opts.jobId); + } + } else { + await getScrapeQueue().addBulk(jobs); + } + } else { + await lockURL(id, sc, url); + + // Not needed, first one should be 15. + // const jobPriority = await getJobPriority({plan, team_id, basePriority: 10}) + + const job = await addScrapeJob( + { + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: req.body.origin ?? 
defaultOrigin, + crawl_id: id, + }, + { + priority: 15, // prioritize request 0 of crawl jobs same as scrape jobs + } + ); + await addCrawlJob(id, job.id); + } + + res.json({ jobId: id }); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v0/crawlPreview.ts b/apps/api/src/controllers/v0/crawlPreview.ts new file mode 100644 index 00000000..f8706867 --- /dev/null +++ b/apps/api/src/controllers/v0/crawlPreview.ts @@ -0,0 +1,138 @@ +import { Request, Response } from "express"; +import { authenticateUser } from "../auth"; +import { RateLimiterMode } from "../../../src/types"; +import { isUrlBlocked } from "../../../src/scraper/WebScraper/utils/blocklist"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../../src/lib/logger"; +import { addCrawlJob, crawlToCrawler, lockURL, saveCrawl, StoredCrawl } from "../../../src/lib/crawl-redis"; +import { addScrapeJob } from "../../../src/services/queue-jobs"; +import { checkAndUpdateURL } from "../../../src/lib/validateUrl"; +import * as Sentry from "@sentry/node"; + +export async function crawlPreviewController(req: Request, res: Response) { + try { + const { success, error, status, team_id:a, plan } = await authenticateUser( + req, + res, + RateLimiterMode.Preview + ); + + const team_id = "preview"; + + if (!success) { + return res.status(status).json({ error }); + } + + let url = req.body.url; + if (!url) { + return res.status(400).json({ error: "Url is required" }); + } + try { + url = checkAndUpdateURL(url).url; + } catch (e) { + return res + .status(e instanceof Error && e.message === "Invalid URL" ? 400 : 500) + .json({ error: e.message ?? e }); + } + + if (isUrlBlocked(url)) { + return res + .status(403) + .json({ + error: + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", + }); + } + + const crawlerOptions = req.body.crawlerOptions ?? {}; + const pageOptions = req.body.pageOptions ?? { onlyMainContent: false, includeHtml: false, removeTags: [] }; + + // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this? + // try { + // const a = new WebScraperDataProvider(); + // await a.setOptions({ + // jobId: uuidv4(), + // mode: "single_urls", + // urls: [url], + // crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true }, + // pageOptions: pageOptions, + // }); + + // const docs = await a.getDocuments(false, (progress) => { + // job.updateProgress({ + // current: progress.current, + // total: progress.total, + // current_step: "SCRAPING", + // current_url: progress.currentDocumentUrl, + // }); + // }); + // return res.json({ + // success: true, + // documents: docs, + // }); + // } catch (error) { + // Logger.error(error); + // return res.status(500).json({ error: error.message }); + // } + // } + + const id = uuidv4(); + + let robots; + + try { + robots = await this.getRobotsTxt(); + } catch (_) {} + + const sc: StoredCrawl = { + originUrl: url, + crawlerOptions, + pageOptions, + team_id, + plan, + robots, + createdAt: Date.now(), + }; + + await saveCrawl(id, sc); + + const crawler = crawlToCrawler(id, sc); + + const sitemap = sc.crawlerOptions?.ignoreSitemap ? 
null : await crawler.tryGetSitemap(); + + if (sitemap !== null) { + for (const url of sitemap.map(x => x.url)) { + await lockURL(id, sc, url); + const job = await addScrapeJob({ + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: "website-preview", + crawl_id: id, + sitemapped: true, + }); + await addCrawlJob(id, job.id); + } + } else { + await lockURL(id, sc, url); + const job = await addScrapeJob({ + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + origin: "website-preview", + crawl_id: id, + }); + await addCrawlJob(id, job.id); + } + + res.json({ jobId: id }); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/keyAuth.ts b/apps/api/src/controllers/v0/keyAuth.ts similarity index 83% rename from apps/api/src/controllers/keyAuth.ts rename to apps/api/src/controllers/v0/keyAuth.ts index 351edd18..b70d672a 100644 --- a/apps/api/src/controllers/keyAuth.ts +++ b/apps/api/src/controllers/v0/keyAuth.ts @@ -1,8 +1,8 @@ -import { AuthResponse, RateLimiterMode } from "../types"; +import { AuthResponse, RateLimiterMode } from "../../types"; import { Request, Response } from "express"; -import { authenticateUser } from "./auth"; +import { authenticateUser } from "../auth"; export const keyAuthController = async (req: Request, res: Response) => { diff --git a/apps/api/src/controllers/liveness.ts b/apps/api/src/controllers/v0/liveness.ts similarity index 100% rename from apps/api/src/controllers/liveness.ts rename to apps/api/src/controllers/v0/liveness.ts diff --git a/apps/api/src/controllers/readiness.ts b/apps/api/src/controllers/v0/readiness.ts similarity index 100% rename from apps/api/src/controllers/readiness.ts rename to apps/api/src/controllers/v0/readiness.ts diff --git a/apps/api/src/controllers/v0/scrape.ts b/apps/api/src/controllers/v0/scrape.ts new file mode 100644 index 00000000..40df5021 --- /dev/null +++ b/apps/api/src/controllers/v0/scrape.ts @@ -0,0 +1,299 @@ +import { ExtractorOptions, PageOptions } from "./../../lib/entities"; +import { Request, Response } from "express"; +import { + billTeam, + checkTeamCredits, +} from "../../services/billing/credit_billing"; +import { authenticateUser } from "../auth"; +import { PlanType, RateLimiterMode } from "../../types"; +import { logJob } from "../../services/logging/log_job"; +import { Document } from "../../lib/entities"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function +import { numTokensFromString } from "../../lib/LLM-extraction/helpers"; +import { + defaultPageOptions, + defaultExtractorOptions, + defaultTimeout, + defaultOrigin, +} from "../../lib/default-values"; +import { addScrapeJob, waitForJob } from "../../services/queue-jobs"; +import { getScrapeQueue } from "../../services/queue-service"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../lib/logger"; +import * as Sentry from "@sentry/node"; +import { getJobPriority } from "../../lib/job-priority"; + +export async function scrapeHelper( + jobId: string, + req: Request, + team_id: string, + crawlerOptions: any, + pageOptions: PageOptions, + extractorOptions: ExtractorOptions, + timeout: number, + plan?: PlanType +): Promise<{ + success: boolean; + error?: string; + data?: Document; + returnCode: number; +}> { + const url = req.body.url; + if 
(!url) { + return { success: false, error: "Url is required", returnCode: 400 }; + } + + if (isUrlBlocked(url)) { + return { + success: false, + error: + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", + returnCode: 403, + }; + } + + const jobPriority = await getJobPriority({ plan, team_id, basePriority: 10 }); + + const job = await addScrapeJob( + { + url, + mode: "single_urls", + crawlerOptions, + team_id, + pageOptions, + extractorOptions, + origin: req.body.origin ?? defaultOrigin, + is_scrape: true, + }, + {}, + jobId, + jobPriority + ); + + let doc; + + const err = await Sentry.startSpan( + { + name: "Wait for job to finish", + op: "bullmq.wait", + attributes: { job: jobId }, + }, + async (span) => { + try { + doc = (await waitForJob(job.id, timeout))[0]; + } catch (e) { + if (e instanceof Error && e.message.startsWith("Job wait")) { + span.setAttribute("timedOut", true); + return { + success: false, + error: "Request timed out", + returnCode: 408, + }; + } else if ( + typeof e === "string" && + (e.includes("Error generating completions: ") || + e.includes("Invalid schema for function") || + e.includes( + "LLM extraction did not match the extraction schema you provided." + )) + ) { + return { + success: false, + error: e, + returnCode: 500, + }; + } else { + throw e; + } + } + span.setAttribute("result", JSON.stringify(doc)); + return null; + } + ); + + if (err !== null) { + return err; + } + + await job.remove(); + + if (!doc) { + console.error("!!! PANIC DOC IS", doc, job); + return { + success: true, + error: "No page found", + returnCode: 200, + data: doc, + }; + } + + delete doc.index; + delete doc.provider; + + // Remove rawHtml if pageOptions.rawHtml is false and extractorOptions.mode is llm-extraction-from-raw-html + if ( + !pageOptions.includeRawHtml && + extractorOptions.mode == "llm-extraction-from-raw-html" + ) { + if (doc.rawHtml) { + delete doc.rawHtml; + } + } + + if (!pageOptions.includeHtml) { + if (doc.html) { + delete doc.html; + } + } + + return { + success: true, + data: doc, + returnCode: 200, + }; +} + +export async function scrapeController(req: Request, res: Response) { + try { + let earlyReturn = false; + // make sure to authenticate user first, Bearer + const { success, team_id, error, status, plan } = await authenticateUser( + req, + res, + RateLimiterMode.Scrape + ); + if (!success) { + return res.status(status).json({ error }); + } + + const crawlerOptions = req.body.crawlerOptions ?? {}; + const pageOptions = { ...defaultPageOptions, ...req.body.pageOptions }; + const extractorOptions = { + ...defaultExtractorOptions, + ...req.body.extractorOptions, + }; + const origin = req.body.origin ?? defaultOrigin; + let timeout = req.body.timeout ?? defaultTimeout; + + if (extractorOptions.mode.includes("llm-extraction")) { + if ( + typeof extractorOptions.extractionSchema !== "object" || + extractorOptions.extractionSchema === null + ) { + return res.status(400).json({ + error: + "extractorOptions.extractionSchema must be an object if llm-extraction mode is specified", + }); + } + + pageOptions.onlyMainContent = true; + timeout = req.body.timeout ?? 
90000; + } + + // checkCredits + try { + const { success: creditsCheckSuccess, message: creditsCheckMessage } = + await checkTeamCredits(team_id, 1); + if (!creditsCheckSuccess) { + earlyReturn = true; + return res.status(402).json({ error: "Insufficient credits" }); + } + } catch (error) { + Logger.error(error); + earlyReturn = true; + return res.status(500).json({ + error: + "Error checking team credits. Please contact hello@firecrawl.com for help.", + }); + } + + const jobId = uuidv4(); + + const startTime = new Date().getTime(); + const result = await scrapeHelper( + jobId, + req, + team_id, + crawlerOptions, + pageOptions, + extractorOptions, + timeout, + plan + ); + const endTime = new Date().getTime(); + const timeTakenInSeconds = (endTime - startTime) / 1000; + const numTokens = + result.data && result.data.markdown + ? numTokensFromString(result.data.markdown, "gpt-3.5-turbo") + : 0; + + if (result.success) { + let creditsToBeBilled = 1; + const creditsPerLLMExtract = 49; + + if (extractorOptions.mode.includes("llm-extraction")) { + // creditsToBeBilled = creditsToBeBilled + (creditsPerLLMExtract * filteredDocs.length); + creditsToBeBilled += creditsPerLLMExtract; + } + + let startTimeBilling = new Date().getTime(); + + if (earlyReturn) { + // Don't bill if we're early returning + return; + } + if (creditsToBeBilled > 0) { + // billing for doc done on queue end, bill only for llm extraction + const billingResult = await billTeam(team_id, creditsToBeBilled); + if (!billingResult.success) { + return res.status(402).json({ + success: false, + error: + "Failed to bill team. Insufficient credits or subscription not found.", + }); + } + } + } + + let doc = result.data; + if (!pageOptions || !pageOptions.includeRawHtml) { + if (doc && doc.rawHtml) { + delete doc.rawHtml; + } + } + + if(pageOptions && pageOptions.includeExtract) { + if(!pageOptions.includeMarkdown && doc && doc.markdown) { + delete doc.markdown; + } + } + + logJob({ + job_id: jobId, + success: result.success, + message: result.error, + num_docs: 1, + docs: [doc], + time_taken: timeTakenInSeconds, + team_id: team_id, + mode: "scrape", + url: req.body.url, + crawlerOptions: crawlerOptions, + pageOptions: pageOptions, + origin: origin, + extractor_options: extractorOptions, + num_tokens: numTokens, + }); + + return res.status(result.returnCode).json(result); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ + error: + typeof error === "string" + ? error + : error?.message ?? 
"Internal Server Error", + }); + } +} diff --git a/apps/api/src/controllers/search.ts b/apps/api/src/controllers/v0/search.ts similarity index 60% rename from apps/api/src/controllers/search.ts rename to apps/api/src/controllers/v0/search.ts index dfd9b8b9..825abbe1 100644 --- a/apps/api/src/controllers/search.ts +++ b/apps/api/src/controllers/v0/search.ts @@ -1,14 +1,18 @@ import { Request, Response } from "express"; -import { WebScraperDataProvider } from "../scraper/WebScraper"; -import { billTeam, checkTeamCredits } from "../services/billing/credit_billing"; -import { authenticateUser } from "./auth"; -import { RateLimiterMode } from "../types"; -import { logJob } from "../services/logging/log_job"; -import { PageOptions, SearchOptions } from "../lib/entities"; -import { search } from "../search"; -import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; +import { WebScraperDataProvider } from "../../scraper/WebScraper"; +import { billTeam, checkTeamCredits } from "../../services/billing/credit_billing"; +import { authenticateUser } from "../auth"; +import { PlanType, RateLimiterMode } from "../../types"; +import { logJob } from "../../services/logging/log_job"; +import { PageOptions, SearchOptions } from "../../lib/entities"; +import { search } from "../../search"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; import { v4 as uuidv4 } from "uuid"; -import { Logger } from "../lib/logger"; +import { Logger } from "../../lib/logger"; +import { getScrapeQueue } from "../../services/queue-service"; +import { addScrapeJob, waitForJob } from "../../services/queue-jobs"; +import * as Sentry from "@sentry/node"; +import { getJobPriority } from "../../lib/job-priority"; export async function searchHelper( jobId: string, @@ -17,6 +21,7 @@ export async function searchHelper( crawlerOptions: any, pageOptions: PageOptions, searchOptions: SearchOptions, + plan: PlanType ): Promise<{ success: boolean; error?: string; @@ -73,55 +78,57 @@ export async function searchHelper( return { success: true, error: "No search results found", returnCode: 200 }; } + const jobPriority = await getJobPriority({plan, team_id, basePriority: 20}); + // filter out social media links + const jobDatas = res.map(x => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: team_id, + pageOptions: pageOptions, + }, + opts: { + jobId: uuid, + priority: jobPriority, + } + }; + }) - const a = new WebScraperDataProvider(); - await a.setOptions({ - jobId, - mode: "single_urls", - urls: res.map((r) => r.url).slice(0, searchOptions.limit ?? 7), - crawlerOptions: { - ...crawlerOptions, - }, - pageOptions: { - ...pageOptions, - onlyMainContent: pageOptions?.onlyMainContent ?? true, - fetchPageContent: pageOptions?.fetchPageContent ?? true, - includeHtml: pageOptions?.includeHtml ?? false, - removeTags: pageOptions?.removeTags ?? 
[], - fallback: false, - }, - }); + let jobs = []; + if (Sentry.isInitialized()) { + for (const job of jobDatas) { + // add with sentry instrumentation + jobs.push(await addScrapeJob(job.data as any, {}, job.opts.jobId)); + } + } else { + jobs = await getScrapeQueue().addBulk(jobDatas); + } - const docs = await a.getDocuments(false); + const docs = (await Promise.all(jobs.map(x => waitForJob(x.id, 60000)))).map(x => x[0]); if (docs.length === 0) { return { success: true, error: "No search results found", returnCode: 200 }; } + await Promise.all(jobs.map(x => x.remove())); + // make sure doc.content is not empty const filteredDocs = docs.filter( - (doc: { content?: string }) => doc.content && doc.content.trim().length > 0 + (doc: { content?: string }) => doc && doc.content && doc.content.trim().length > 0 ); if (filteredDocs.length === 0) { return { success: true, error: "No page found", returnCode: 200, data: docs }; } - const billingResult = await billTeam( - team_id, - filteredDocs.length - ); - if (!billingResult.success) { - return { - success: false, - error: - "Failed to bill team. Insufficient credits or subscription not found.", - returnCode: 402, - }; - } - return { success: true, data: filteredDocs, @@ -132,7 +139,7 @@ export async function searchHelper( export async function searchController(req: Request, res: Response) { try { // make sure to authenticate user first, Bearer - const { success, team_id, error, status } = await authenticateUser( + const { success, team_id, error, status, plan } = await authenticateUser( req, res, RateLimiterMode.Search @@ -142,16 +149,16 @@ export async function searchController(req: Request, res: Response) { } const crawlerOptions = req.body.crawlerOptions ?? {}; const pageOptions = req.body.pageOptions ?? { - includeHtml: false, - onlyMainContent: true, - fetchPageContent: true, - removeTags: [], - fallback: false, + includeHtml: req.body.pageOptions?.includeHtml ?? false, + onlyMainContent: req.body.pageOptions?.onlyMainContent ?? false, + fetchPageContent: req.body.pageOptions?.fetchPageContent ?? true, + removeTags: req.body.pageOptions?.removeTags ?? [], + fallback: req.body.pageOptions?.fallback ?? false, }; const origin = req.body.origin ?? "api"; - const searchOptions = req.body.searchOptions ?? { limit: 7 }; - + const searchOptions = req.body.searchOptions ?? 
{ limit: 5 }; + const jobId = uuidv4(); try { @@ -161,6 +168,7 @@ export async function searchController(req: Request, res: Response) { return res.status(402).json({ error: "Insufficient credits" }); } } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: "Internal server error" }); } @@ -172,6 +180,7 @@ export async function searchController(req: Request, res: Response) { crawlerOptions, pageOptions, searchOptions, + plan ); const endTime = new Date().getTime(); const timeTakenInSeconds = (endTime - startTime) / 1000; @@ -191,6 +200,11 @@ export async function searchController(req: Request, res: Response) { }); return res.status(result.returnCode).json(result); } catch (error) { + if (error instanceof Error && error.message.startsWith("Job wait")) { + return res.status(408).json({ error: "Request timed out" }); + } + + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/v0/status.ts b/apps/api/src/controllers/v0/status.ts new file mode 100644 index 00000000..34ebb3c6 --- /dev/null +++ b/apps/api/src/controllers/v0/status.ts @@ -0,0 +1,43 @@ +import { Request, Response } from "express"; +import { Logger } from "../../../src/lib/logger"; +import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis"; +import { getJobs } from "./crawl-status"; +import * as Sentry from "@sentry/node"; + +export async function crawlJobStatusPreviewController(req: Request, res: Response) { + try { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ error: "Job not found" }); + } + + const jobIDs = await getCrawlJobs(req.params.jobId); + + // let data = job.returnvalue; + // if (process.env.USE_DB_AUTHENTICATION === "true") { + // const supabaseData = await supabaseGetJobById(req.params.jobId); + + // if (supabaseData) { + // data = supabaseData.docs; + // } + // } + + const jobs = (await getJobs(jobIDs)).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map(x => x.getState())); + const jobStatus = sc.cancelled ? "failed" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "active"; + + const data = jobs.map(x => Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue); + + res.json({ + status: jobStatus, + current: jobStatuses.filter(x => x === "completed" || x === "failed").length, + total: jobs.length, + data: jobStatus === "completed" ? data : null, + partial_data: jobStatus === "completed" ? 
[] : data.filter(x => x !== null), + }); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff --git a/apps/api/src/controllers/v1/__tests__/crawl.test.ts.WIP b/apps/api/src/controllers/v1/__tests__/crawl.test.ts.WIP new file mode 100644 index 00000000..621c7436 --- /dev/null +++ b/apps/api/src/controllers/v1/__tests__/crawl.test.ts.WIP @@ -0,0 +1,47 @@ +import { crawlController } from '../crawl' +import { Request, Response } from 'express'; +import { authenticateUser } from '../auth'; // Ensure this import is correct +import { createIdempotencyKey } from '../../services/idempotency/create'; +import { validateIdempotencyKey } from '../../services/idempotency/validate'; +import { v4 as uuidv4 } from 'uuid'; + +jest.mock('../auth', () => ({ + authenticateUser: jest.fn().mockResolvedValue({ + success: true, + team_id: 'team123', + error: null, + status: 200 + }), + reduce: jest.fn() +})); +jest.mock('../../services/idempotency/validate'); + +describe('crawlController', () => { + it('should prevent duplicate requests using the same idempotency key', async () => { + const req = { + headers: { + 'x-idempotency-key': await uuidv4(), + 'Authorization': `Bearer ${process.env.TEST_API_KEY}` + }, + body: { + url: 'https://mendable.ai' + } + } as unknown as Request; + const res = { + status: jest.fn().mockReturnThis(), + json: jest.fn() + } as unknown as Response; + + // Mock the idempotency key validation to return false for the second call + (validateIdempotencyKey as jest.Mock).mockResolvedValueOnce(true).mockResolvedValueOnce(false); + + // First request should succeed + await crawlController(req, res); + expect(res.status).not.toHaveBeenCalledWith(409); + + // Second request with the same key should fail + await crawlController(req, res); + expect(res.status).toHaveBeenCalledWith(409); + expect(res.json).toHaveBeenCalledWith({ error: 'Idempotency key already used' }); + }); +}); \ No newline at end of file diff --git a/apps/api/src/controllers/v1/__tests__/urlValidation.test.ts b/apps/api/src/controllers/v1/__tests__/urlValidation.test.ts new file mode 100644 index 00000000..0a9931d3 --- /dev/null +++ b/apps/api/src/controllers/v1/__tests__/urlValidation.test.ts @@ -0,0 +1,64 @@ +import { url } from "../types"; + +describe("URL Schema Validation", () => { + beforeEach(() => { + jest.resetAllMocks(); + }); + + it("should prepend http:// to URLs without a protocol", () => { + const result = url.parse("example.com"); + expect(result).toBe("http://example.com"); + }); + + it("should allow valid URLs with http or https", () => { + expect(() => url.parse("http://example.com")).not.toThrow(); + expect(() => url.parse("https://example.com")).not.toThrow(); + }); + + it("should allow valid URLs with http or https", () => { + expect(() => url.parse("example.com")).not.toThrow(); + }); + + it("should reject URLs with unsupported protocols", () => { + expect(() => url.parse("ftp://example.com")).toThrow("Invalid URL"); + }); + + it("should reject URLs without a valid top-level domain", () => { + expect(() => url.parse("http://example")).toThrow("URL must have a valid top-level domain or be a valid path"); + }); + + it("should reject blocked URLs", () => { + expect(() => url.parse("https://facebook.com")).toThrow("Firecrawl currently does not support social media scraping due to policy restrictions. 
We're actively working on building support for it."); + }); + + it("should handle URLs with subdomains correctly", () => { + expect(() => url.parse("http://sub.example.com")).not.toThrow(); + expect(() => url.parse("https://blog.example.com")).not.toThrow(); + }); + + it("should handle URLs with paths correctly", () => { + expect(() => url.parse("http://example.com/path")).not.toThrow(); + expect(() => url.parse("https://example.com/another/path")).not.toThrow(); + }); + + it("should handle URLs with subdomains that are blocked", () => { + expect(() => url.parse("https://sub.facebook.com")).toThrow("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); + }); + + it("should handle URLs with paths that are blocked", () => { + expect(() => url.parse("http://facebook.com/path")).toThrow("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); + expect(() => url.parse("https://facebook.com/another/path")).toThrow("Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it."); + }); + + it("should reject malformed URLs starting with 'http://http'", () => { + expect(() => url.parse("http://http://example.com")).toThrow("Invalid URL. Invalid protocol."); + }); + + it("should allow URLs with 'http://' appearing later in the path", () => { + expect(() => url.parse("http://example.com/http://example.com")).not.toThrow(); + }); + + it("should reject URLs containing spaces", () => { + expect(() => url.parse("http://ex ample.com/")).toThrow("Invalid URL"); + }); +}) \ No newline at end of file diff --git a/apps/api/src/controllers/v1/crawl-cancel.ts b/apps/api/src/controllers/v1/crawl-cancel.ts new file mode 100644 index 00000000..06a5b26e --- /dev/null +++ b/apps/api/src/controllers/v1/crawl-cancel.ts @@ -0,0 +1,58 @@ +import { Request, Response } from "express"; +import { authenticateUser } from "../auth"; +import { RateLimiterMode } from "../../types"; +import { supabase_service } from "../../services/supabase"; +import { Logger } from "../../lib/logger"; +import { getCrawl, saveCrawl } from "../../lib/crawl-redis"; +import * as Sentry from "@sentry/node"; + +export async function crawlCancelController(req: Request, res: Response) { + try { + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; + + const { success, team_id, error, status } = await authenticateUser( + req, + res, + RateLimiterMode.CrawlStatus + ); + if (!success) { + return res.status(status).json({ error }); + } + + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ error: "Job not found" }); + } + + // check if the job belongs to the team + if (useDbAuthentication) { + const { data, error: supaError } = await supabase_service + .from("bulljobs_teams") + .select("*") + .eq("job_id", req.params.jobId) + .eq("team_id", team_id); + if (supaError) { + return res.status(500).json({ error: supaError.message }); + } + + if (data.length === 0) { + return res.status(403).json({ error: "Unauthorized" }); + } + } + + try { + sc.cancelled = true; + await saveCrawl(req.params.jobId, sc); + } catch (error) { + Logger.error(error); + } + + res.json({ + status: "cancelled" + }); + } catch (error) { + Sentry.captureException(error); + Logger.error(error); + return res.status(500).json({ error: error.message }); + } +} diff
--git a/apps/api/src/controllers/v1/crawl-status-ws.ts b/apps/api/src/controllers/v1/crawl-status-ws.ts new file mode 100644 index 00000000..8d823096 --- /dev/null +++ b/apps/api/src/controllers/v1/crawl-status-ws.ts @@ -0,0 +1,161 @@ +import { authMiddleware } from "../../routes/v1"; +import { RateLimiterMode } from "../../types"; +import { authenticateUser } from "../auth"; +import { CrawlStatusParams, CrawlStatusResponse, Document, ErrorResponse, legacyDocumentConverter, RequestWithAuth } from "./types"; +import { WebSocket } from "ws"; +import { v4 as uuidv4 } from "uuid"; +import { Logger } from "../../lib/logger"; +import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength, isCrawlFinished, isCrawlFinishedLocked } from "../../lib/crawl-redis"; +import { getScrapeQueue } from "../../services/queue-service"; +import { getJob, getJobs } from "./crawl-status"; +import * as Sentry from "@sentry/node"; + +type ErrorMessage = { + type: "error", + error: string, +} + +type CatchupMessage = { + type: "catchup", + data: CrawlStatusResponse, +} + +type DocumentMessage = { + type: "document", + data: Document, +} + +type DoneMessage = { type: "done" } + +type Message = ErrorMessage | CatchupMessage | DoneMessage | DocumentMessage; + +function send(ws: WebSocket, msg: Message) { + if (ws.readyState === 1) { + return new Promise((resolve, reject) => { + ws.send(JSON.stringify(msg), (err) => { + if (err) reject(err); + else resolve(null); + }); + }); + } +} + +function close(ws: WebSocket, code: number, msg: Message) { + if (ws.readyState <= 1) { + ws.close(code, JSON.stringify(msg)); + } +} + +async function crawlStatusWS(ws: WebSocket, req: RequestWithAuth) { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return close(ws, 1008, { type: "error", error: "Job not found" }); + } + + if (sc.team_id !== req.auth.team_id) { + return close(ws, 3003, { type: "error", error: "Forbidden" }); + } + + let doneJobIDs = []; + let finished = false; + + const loop = async () => { + if (finished) return; + + const jobIDs = await getCrawlJobs(req.params.jobId); + + if (jobIDs.length === doneJobIDs.length) { + return close(ws, 1000, { type: "done" }); + } + + const notDoneJobIDs = jobIDs.filter(x => !doneJobIDs.includes(x)); + const jobStatuses = await Promise.all(notDoneJobIDs.map(async x => [x, await getScrapeQueue().getJobState(x)])); + const newlyDoneJobIDs = jobStatuses.filter(x => x[1] === "completed" || x[1] === "failed").map(x => x[0]); + + for (const jobID of newlyDoneJobIDs) { + const job = await getJob(jobID); + + if (job.returnvalue) { + send(ws, { + type: "document", + data: legacyDocumentConverter(job.returnvalue), + }) + } else { + return close(ws, 3000, { type: "error", error: job.failedReason }); + } + } + + doneJobIDs.push(...newlyDoneJobIDs); + + setTimeout(loop, 1000); + }; + + setTimeout(loop, 1000); + + doneJobIDs = await getDoneJobsOrdered(req.params.jobId); + + const jobIDs = await getCrawlJobs(req.params.jobId); + const jobStatuses = await Promise.all(jobIDs.map(x => getScrapeQueue().getJobState(x))); + const status: Exclude<CrawlStatusResponse, ErrorResponse>["status"] = sc.cancelled ? "cancelled" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? 
"failed" : "scraping"; + const doneJobs = await getJobs(doneJobIDs); + const data = doneJobs.map(x => x.returnvalue); + + send(ws, { + type: "catchup", + data: { + status, + total: jobIDs.length, + completed: doneJobIDs.length, + creditsUsed: jobIDs.length, + expiresAt: (await getCrawlExpiry(req.params.jobId)).toISOString(), + data: data.map(x => legacyDocumentConverter(x)), + } + }); + + if (status !== "scraping") { + finished = true; + return close(ws, 1000, { type: "done" }); + } +} + +// Basically just middleware and error wrapping +export async function crawlStatusWSController(ws: WebSocket, req: RequestWithAuth) { + try { + const { success, team_id, error, status, plan } = await authenticateUser( + req, + null, + RateLimiterMode.CrawlStatus, + ); + + if (!success) { + return close(ws, 3000, { + type: "error", + error, + }); + } + + req.auth = { team_id, plan }; + + await crawlStatusWS(ws, req); + } catch (err) { + Sentry.captureException(err); + + const id = uuidv4(); + let verbose = JSON.stringify(err); + if (verbose === "{}") { + if (err instanceof Error) { + verbose = JSON.stringify({ + message: err.message, + name: err.name, + stack: err.stack, + }); + } + } + + Logger.error("Error occurred in WebSocket! (" + req.path + ") -- ID " + id + " -- " + verbose); + return close(ws, 1011, { + type: "error", + error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. Your exception ID is " + id + }); + } +} diff --git a/apps/api/src/controllers/v1/crawl-status.ts b/apps/api/src/controllers/v1/crawl-status.ts new file mode 100644 index 00000000..845f616c --- /dev/null +++ b/apps/api/src/controllers/v1/crawl-status.ts @@ -0,0 +1,126 @@ +import { Response } from "express"; +import { CrawlStatusParams, CrawlStatusResponse, ErrorResponse, legacyDocumentConverter, RequestWithAuth } from "./types"; +import { getCrawl, getCrawlExpiry, getCrawlJobs, getDoneJobsOrdered, getDoneJobsOrderedLength } from "../../lib/crawl-redis"; +import { getScrapeQueue } from "../../services/queue-service"; +import { supabaseGetJobById, supabaseGetJobsById } from "../../lib/supabase-jobs"; + +export async function getJob(id: string) { + const job = await getScrapeQueue().getJob(id); + if (!job) return job; + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobById(id); + + if (supabaseData) { + job.returnvalue = supabaseData.docs; + } + } + + job.returnvalue = Array.isArray(job.returnvalue) ? job.returnvalue[0] : job.returnvalue; + + return job; +} + +export async function getJobs(ids: string[]) { + const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x); + + if (process.env.USE_DB_AUTHENTICATION === "true") { + const supabaseData = await supabaseGetJobsById(ids); + + supabaseData.forEach(x => { + const job = jobs.find(y => y.id === x.job_id); + if (job) { + job.returnvalue = x.docs; + } + }) + } + + jobs.forEach(job => { + job.returnvalue = Array.isArray(job.returnvalue) ? job.returnvalue[0] : job.returnvalue; + }); + + return jobs; +} + +export async function crawlStatusController(req: RequestWithAuth, res: Response) { + const sc = await getCrawl(req.params.jobId); + if (!sc) { + return res.status(404).json({ success: false, error: "Job not found" }); + } + + if (sc.team_id !== req.auth.team_id) { + return res.status(403).json({ success: false, error: "Forbidden" }); + } + + const start = typeof req.query.skip === "string" ? parseInt(req.query.skip, 10) : 0; + const end = typeof req.query.limit === "string" ? 
(start + parseInt(req.query.limit, 10) - 1) : undefined; + + const jobIDs = await getCrawlJobs(req.params.jobId); + const jobStatuses = await Promise.all(jobIDs.map(x => getScrapeQueue().getJobState(x))); + const status: Exclude<CrawlStatusResponse, ErrorResponse>["status"] = sc.cancelled ? "cancelled" : jobStatuses.every(x => x === "completed") ? "completed" : jobStatuses.some(x => x === "failed") ? "failed" : "scraping"; + const doneJobsLength = await getDoneJobsOrderedLength(req.params.jobId); + const doneJobsOrder = await getDoneJobsOrdered(req.params.jobId, start, end ?? -1); + + let doneJobs = []; + + if (end === undefined) { // determine 10 megabyte limit + let bytes = 0; + const bytesLimit = 10485760; // 10 MiB in bytes + const factor = 100; // chunking for faster retrieval + + for (let i = 0; i < doneJobsOrder.length && bytes < bytesLimit; i += factor) { + // get current chunk and retrieve jobs + const currentIDs = doneJobsOrder.slice(i, i+factor); + const jobs = await getJobs(currentIDs); + + // iterate through jobs and add them one by one to the byte counter + // both loops will break once we cross the byte counter + for (let ii = 0; ii < jobs.length && bytes < bytesLimit; ii++) { + const job = jobs[ii]; + doneJobs.push(job); + bytes += JSON.stringify(legacyDocumentConverter(job.returnvalue)).length; + } + } + + // if we ran over the bytes limit, remove the last document + if (bytes > bytesLimit) { + doneJobs.splice(doneJobs.length - 1, 1); + } + } else { + doneJobs = await getJobs(doneJobsOrder); + } + + const data = doneJobs.map(x => x.returnvalue); + + const nextURL = new URL(`${req.protocol}://${req.get("host")}/v1/crawl/${req.params.jobId}`); + + nextURL.searchParams.set("skip", (start + data.length).toString()); + + if (typeof req.query.limit === "string") { + nextURL.searchParams.set("limit", req.query.limit); + } + + if (data.length > 0) { + if (!doneJobs[0].data.pageOptions.includeRawHtml) { + for (let ii = 0; ii < doneJobs.length; ii++) { + if (data[ii]) { + delete data[ii].rawHtml; + } + } + } + } + + res.status(200).json({ + status, + completed: doneJobsLength, + total: jobIDs.length, + creditsUsed: jobIDs.length, + expiresAt: (await getCrawlExpiry(req.params.jobId)).toISOString(), + next: + status !== "scraping" && (start + data.length) === doneJobsLength // if there's not gonna be any documents after this + ? 
undefined + : nextURL.href, + data: data.map(x => legacyDocumentConverter(x)), + }); +} + diff --git a/apps/api/src/controllers/v1/crawl.ts b/apps/api/src/controllers/v1/crawl.ts new file mode 100644 index 00000000..c2d5bdca --- /dev/null +++ b/apps/api/src/controllers/v1/crawl.ts @@ -0,0 +1,165 @@ +import { Response } from "express"; +import { v4 as uuidv4 } from "uuid"; +import { + CrawlRequest, + crawlRequestSchema, + CrawlResponse, + legacyCrawlerOptions, + legacyScrapeOptions, + RequestWithAuth, +} from "./types"; +import { + addCrawlJob, + addCrawlJobs, + crawlToCrawler, + lockURL, + lockURLs, + saveCrawl, + StoredCrawl, +} from "../../lib/crawl-redis"; +import { logCrawl } from "../../services/logging/crawl_log"; +import { getScrapeQueue } from "../../services/queue-service"; +import { addScrapeJob } from "../../services/queue-jobs"; +import { Logger } from "../../lib/logger"; +import { getJobPriority } from "../../lib/job-priority"; +import { callWebhook } from "../../services/webhook"; + +export async function crawlController( + req: RequestWithAuth<{}, CrawlResponse, CrawlRequest>, + res: Response +) { + req.body = crawlRequestSchema.parse(req.body); + + const id = uuidv4(); + + await logCrawl(id, req.auth.team_id); + + const { remainingCredits } = req.account; + + const crawlerOptions = legacyCrawlerOptions(req.body); + const pageOptions = legacyScrapeOptions(req.body.scrapeOptions); + + // TODO: @rafa, is this right? copied from v0 + if (Array.isArray(crawlerOptions.includes)) { + for (const x of crawlerOptions.includes) { + try { + new RegExp(x); + } catch (e) { + return res.status(400).json({ success: false, error: e.message }); + } + } + } + + if (Array.isArray(crawlerOptions.excludes)) { + for (const x of crawlerOptions.excludes) { + try { + new RegExp(x); + } catch (e) { + return res.status(400).json({ success: false, error: e.message }); + } + } + } + + crawlerOptions.limit = Math.min(remainingCredits, crawlerOptions.limit); + + const sc: StoredCrawl = { + originUrl: req.body.url, + crawlerOptions, + pageOptions, + team_id: req.auth.team_id, + createdAt: Date.now(), + plan: req.auth.plan, + }; + + const crawler = crawlToCrawler(id, sc); + + try { + sc.robots = await crawler.getRobotsTxt(); + } catch (e) { + Logger.debug( + `[Crawl] Failed to get robots.txt (this is probably fine!): ${JSON.stringify( + e + )}` + ); + } + + await saveCrawl(id, sc); + + const sitemap = sc.crawlerOptions.ignoreSitemap + ? 
null + : await crawler.tryGetSitemap(); + + if (sitemap !== null && sitemap.length > 0) { + let jobPriority = 20; + // If it is over 1000, we need to get the job priority, + // otherwise we can use the default priority of 20 + if(sitemap.length > 1000){ + // set base to 21 + jobPriority = await getJobPriority({plan: req.auth.plan, team_id: req.auth.team_id, basePriority: 21}) + } + const jobs = sitemap.map((x) => { + const url = x.url; + const uuid = uuidv4(); + return { + name: uuid, + data: { + url, + mode: "single_urls", + team_id: req.auth.team_id, + crawlerOptions, + pageOptions, + origin: "api", + crawl_id: id, + sitemapped: true, + webhook: req.body.webhook, + v1: true, + }, + opts: { + jobId: uuid, + priority: jobPriority, + }, + }; + }); + + await lockURLs( + id, + jobs.map((x) => x.data.url) + ); + await addCrawlJobs( + id, + jobs.map((x) => x.opts.jobId) + ); + await getScrapeQueue().addBulk(jobs); + } else { + await lockURL(id, sc, req.body.url); + const job = await addScrapeJob( + { + url: req.body.url, + mode: "single_urls", + crawlerOptions: crawlerOptions, + team_id: req.auth.team_id, + pageOptions: pageOptions, + origin: "api", + crawl_id: id, + webhook: req.body.webhook, + v1: true, + }, + { + priority: 15, + } + ); + await addCrawlJob(id, job.id); + } + + if(req.body.webhook) { + await callWebhook(req.auth.team_id, id, null, req.body.webhook, true, "crawl.started"); + } + + return res.status(200).json({ + success: true, + id, + url: `${req.protocol}://${req.get("host")}/v1/crawl/${id}`, + }); +} + + diff --git a/apps/api/src/controllers/v1/liveness.ts b/apps/api/src/controllers/v1/liveness.ts new file mode 100644 index 00000000..8ff1a96f --- /dev/null +++ b/apps/api/src/controllers/v1/liveness.ts @@ -0,0 +1,6 @@ +import { Request, Response } from "express"; + +export async function livenessController(req: Request, res: Response) { + //TODO: add checks if the application is live and healthy like checking the redis connection + res.status(200).json({ status: "ok" }); +} diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts new file mode 100644 index 00000000..21e91840 --- /dev/null +++ b/apps/api/src/controllers/v1/map.ts @@ -0,0 +1,132 @@ +import { Response } from "express"; +import { v4 as uuidv4 } from "uuid"; +import { + legacyCrawlerOptions, + mapRequestSchema, + RequestWithAuth, +} from "./types"; +import { crawlToCrawler, StoredCrawl } from "../../lib/crawl-redis"; +import { MapResponse, MapRequest } from "./types"; +import { configDotenv } from "dotenv"; +import { + checkAndUpdateURLForMap, + isSameDomain, + isSameSubdomain, + removeDuplicateUrls, +} from "../../lib/validateUrl"; +import { fireEngineMap } from "../../search/fireEngine"; +import { billTeam } from "../../services/billing/credit_billing"; +import { logJob } from "../../services/logging/log_job"; +import { performCosineSimilarity } from "../../lib/map-cosine"; + +configDotenv(); + +export async function mapController( + req: RequestWithAuth<{}, MapResponse, MapRequest>, + res: Response +) { + const startTime = new Date().getTime(); + + req.body = mapRequestSchema.parse(req.body); + + + const limit : number = req.body.limit ?? 
5000; + + const id = uuidv4(); + let links: string[] = [req.body.url]; + + const sc: StoredCrawl = { + originUrl: req.body.url, + crawlerOptions: legacyCrawlerOptions(req.body), + pageOptions: {}, + team_id: req.auth.team_id, + createdAt: Date.now(), + plan: req.auth.plan, + }; + + const crawler = crawlToCrawler(id, sc); + + const sitemap = req.body.ignoreSitemap ? null : await crawler.tryGetSitemap(); + + if (sitemap !== null) { + sitemap.map((x) => { + links.push(x.url); + }); + } + + let urlWithoutWww = req.body.url.replace("www.", ""); + + let mapUrl = req.body.search + ? `"${req.body.search}" site:${urlWithoutWww}` + : `site:${req.body.url}`; + // www. seems to exclude subdomains in some cases + const mapResults = await fireEngineMap(mapUrl, { + // limit to 50 results (beta) + numResults: Math.min(limit, 50), + }); + + if (mapResults.length > 0) { + if (req.body.search) { + // Ensure all map results are first, maintaining their order + links = [ + mapResults[0].url, + ...mapResults.slice(1).map((x) => x.url), + ...links, + ]; + } else { + mapResults.map((x) => { + links.push(x.url); + }); + } + } + + // Perform cosine similarity between the search query and the list of links + if (req.body.search) { + const searchQuery = req.body.search.toLowerCase(); + + links = performCosineSimilarity(links, searchQuery); + } + + links = links.map((x) => checkAndUpdateURLForMap(x).url.trim()); + + // allows for subdomains to be included + links = links.filter((x) => isSameDomain(x, req.body.url)); + + // if includeSubdomains is false, filter out subdomains + if (!req.body.includeSubdomains) { + links = links.filter((x) => isSameSubdomain(x, req.body.url)); + } + + // remove duplicates that could be due to http/https or www + links = removeDuplicateUrls(links); + + await billTeam(req.auth.team_id, 1); + + const endTime = new Date().getTime(); + const timeTakenInSeconds = (endTime - startTime) / 1000; + + const linksToReturn = links.slice(0, limit); + + logJob({ + job_id: id, + success: links.length > 0, + message: "Map completed", + num_docs: linksToReturn.length, + docs: linksToReturn, + time_taken: timeTakenInSeconds, + team_id: req.auth.team_id, + mode: "map", + url: req.body.url, + crawlerOptions: {}, + pageOptions: {}, + origin: req.body.origin, + extractor_options: { mode: "markdown" }, + num_tokens: 0, + }); + + return res.status(200).json({ + success: true, + links: linksToReturn, + scrape_id: req.body.origin?.includes("website") ? 
id : undefined, + }); +} diff --git a/apps/api/src/controllers/v1/readiness.ts b/apps/api/src/controllers/v1/readiness.ts new file mode 100644 index 00000000..cdb1f02c --- /dev/null +++ b/apps/api/src/controllers/v1/readiness.ts @@ -0,0 +1,6 @@ +import { Request, Response } from "express"; + +export async function readinessController(req: Request, res: Response) { + // TODO: add checks when the application is ready to serve traffic + res.status(200).json({ status: "ok" }); +} diff --git a/apps/api/src/controllers/v1/scrape-status.ts b/apps/api/src/controllers/v1/scrape-status.ts new file mode 100644 index 00000000..5e0aecb6 --- /dev/null +++ b/apps/api/src/controllers/v1/scrape-status.ts @@ -0,0 +1,38 @@ +import { Response } from "express"; +import { supabaseGetJobByIdOnlyData } from "../../lib/supabase-jobs"; +import { scrapeStatusRateLimiter } from "../../services/rate-limiter"; + +export async function scrapeStatusController(req: any, res: any) { + try { + const rateLimiter = scrapeStatusRateLimiter; + const incomingIP = (req.headers["x-forwarded-for"] || + req.socket.remoteAddress) as string; + const iptoken = incomingIP; + await rateLimiter.consume(iptoken); + + const job = await supabaseGetJobByIdOnlyData(req.params.jobId); + + if(job.team_id !== "41bdbfe1-0579-4d9b-b6d5-809f16be12f5"){ + return res.status(403).json({ + success: false, + error: "You are not allowed to access this resource.", + }); + } + return res.status(200).json({ + success: true, + data: job?.docs[0], + }); + } catch (error) { + if (error instanceof Error && error.message == "Too Many Requests") { + return res.status(429).json({ + success: false, + error: "Rate limit exceeded. Please try again later.", + }); + } else { + return res.status(500).json({ + success: false, + error: "An unexpected error occurred.", + }); + } + } +} diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts new file mode 100644 index 00000000..9fba1a45 --- /dev/null +++ b/apps/api/src/controllers/v1/scrape.ts @@ -0,0 +1,152 @@ +import { Request, Response } from "express"; +import { Logger } from "../../lib/logger"; +import { + Document, + legacyDocumentConverter, + legacyExtractorOptions, + legacyScrapeOptions, + RequestWithAuth, + ScrapeRequest, + scrapeRequestSchema, + ScrapeResponse, +} from "./types"; +import { billTeam } from "../../services/billing/credit_billing"; +import { v4 as uuidv4 } from "uuid"; +import { numTokensFromString } from "../../lib/LLM-extraction/helpers"; +import { addScrapeJob, waitForJob } from "../../services/queue-jobs"; +import { logJob } from "../../services/logging/log_job"; +import { getJobPriority } from "../../lib/job-priority"; +import { PlanType } from "../../types"; + +export async function scrapeController( + req: RequestWithAuth<{}, ScrapeResponse, ScrapeRequest>, + res: Response +) { + req.body = scrapeRequestSchema.parse(req.body); + let earlyReturn = false; + + const origin = req.body.origin; + const timeout = req.body.timeout; + const pageOptions = legacyScrapeOptions(req.body); + const extractorOptions = req.body.extract ? 
legacyExtractorOptions(req.body.extract) : undefined; + const jobId = uuidv4(); + + const startTime = new Date().getTime(); + const jobPriority = await getJobPriority({ + plan: req.auth.plan as PlanType, + team_id: req.auth.team_id, + basePriority: 10, + }); + + const job = await addScrapeJob( + { + url: req.body.url, + mode: "single_urls", + crawlerOptions: {}, + team_id: req.auth.team_id, + pageOptions, + extractorOptions, + origin: req.body.origin, + is_scrape: true, + }, + {}, + jobId, + jobPriority + ); + + let doc: any | undefined; + try { + doc = (await waitForJob(job.id, timeout))[0]; + } catch (e) { + Logger.error(`Error in scrapeController: ${e}`); + if (e instanceof Error && e.message.startsWith("Job wait")) { + return res.status(408).json({ + success: false, + error: "Request timed out", + }); + } else { + return res.status(500).json({ + success: false, + error: `(Internal server error) - ${e && e?.message ? e.message : e} ${ + extractorOptions && extractorOptions.mode !== "markdown" + ? " - Could be due to LLM parsing issues" + : "" + }`, + }); + } + } + + await job.remove(); + + if (!doc) { + console.error("!!! PANIC DOC IS", doc, job); + return res.status(200).json({ + success: true, + warning: "No page found", + data: doc, + }); + } + + delete doc.index; + delete doc.provider; + + const endTime = new Date().getTime(); + const timeTakenInSeconds = (endTime - startTime) / 1000; + const numTokens = + doc && doc.markdown + ? numTokensFromString(doc.markdown, "gpt-3.5-turbo") + : 0; + + let creditsToBeBilled = 1; // Assuming 1 credit per document + if (earlyReturn) { + // Don't bill if we're early returning + return; + } + if(req.body.extract && req.body.formats.includes("extract")) { + creditsToBeBilled = 50; + } + + const billingResult = await billTeam(req.auth.team_id, creditsToBeBilled); + if (!billingResult.success) { + return res.status(402).json({ + success: false, + error: + "Failed to bill team. Insufficient credits or subscription not found.", + }); + } + + if (!pageOptions || !pageOptions.includeRawHtml) { + if (doc && doc.rawHtml) { + delete doc.rawHtml; + } + } + + if(pageOptions && pageOptions.includeExtract) { + if(!pageOptions.includeMarkdown && doc && doc.markdown) { + delete doc.markdown; + } + } + + logJob({ + job_id: jobId, + success: true, + message: "Scrape completed", + num_docs: 1, + docs: [doc], + time_taken: timeTakenInSeconds, + team_id: req.auth.team_id, + mode: "scrape", + url: req.body.url, + crawlerOptions: {}, + pageOptions: pageOptions, + origin: origin, + extractor_options: { mode: "markdown" }, + num_tokens: numTokens, + }); + + return res.status(200).json({ + success: true, + data: legacyDocumentConverter(doc), + scrape_id: origin?.includes("website") ? 
jobId : undefined, + }); +} diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts new file mode 100644 index 00000000..c4e0cf84 --- /dev/null +++ b/apps/api/src/controllers/v1/types.ts @@ -0,0 +1,371 @@ +import { Request, Response } from "express"; +import { z } from "zod"; +import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; +import { ExtractorOptions, PageOptions } from "../../lib/entities"; +import { protocolIncluded, checkUrl } from "../../lib/validateUrl"; +import { PlanType } from "../../types"; + +export type Format = + | "markdown" + | "html" + | "rawHtml" + | "links" + | "screenshot" + | "screenshot@fullPage" + | "extract"; + +export const url = z.preprocess( + (x) => { + if (!protocolIncluded(x as string)) { + return `http://${x}`; + } + return x; + }, + z + .string() + .url() + .regex(/^https?:\/\//, "URL uses unsupported protocol") + .refine( + (x) => /\.[a-z]{2,}(\/|$)/i.test(x), + "URL must have a valid top-level domain or be a valid path" + ) + .refine( + (x) => checkUrl(x as string), + "Invalid URL" + ) + .refine( + (x) => !isUrlBlocked(x as string), + "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." + ) +); + +const strictMessage = "Unrecognized key in body -- please review the v1 API documentation for request body changes"; + +export const extractOptions = z.object({ + mode: z.enum(["llm"]).default("llm"), + schema: z.any().optional(), + systemPrompt: z.string().default("Based on the information on the page, extract all the information from the schema. Try to extract all the fields even those that might not be marked as required."), + prompt: z.string().optional() +}).strict(strictMessage); + +export type ExtractOptions = z.infer<typeof extractOptions>; + +export const scrapeOptions = z.object({ + formats: z + .enum([ + "markdown", + "html", + "rawHtml", + "links", + "screenshot", + "screenshot@fullPage", + "extract" + ]) + .array() + .optional() + .default(["markdown"]), + headers: z.record(z.string(), z.string()).optional(), + includeTags: z.string().array().optional(), + excludeTags: z.string().array().optional(), + onlyMainContent: z.boolean().default(true), + timeout: z.number().int().positive().finite().safe().default(30000), + waitFor: z.number().int().nonnegative().finite().safe().default(0), + extract: extractOptions.optional(), + parsePDF: z.boolean().default(true), +}).strict(strictMessage) + + +export type ScrapeOptions = z.infer<typeof scrapeOptions>; + +export const scrapeRequestSchema = scrapeOptions.extend({ + url, + origin: z.string().optional().default("api"), +}).strict(strictMessage).refine( + (obj) => { + const hasExtractFormat = obj.formats?.includes("extract"); + const hasExtractOptions = obj.extract !== undefined; + return (hasExtractFormat && hasExtractOptions) || (!hasExtractFormat && !hasExtractOptions); + }, + { + message: "When 'extract' format is specified, 'extract' options must be provided, and vice versa", + } +).transform((obj) => { + if ((obj.formats?.includes("extract") || obj.extract) && !obj.timeout) { + return { ...obj, timeout: 60000 }; + } + return obj; +}); + +// export type ScrapeRequest = { +// url: string; +// formats?: Format[]; +// headers?: { [K: string]: string }; +// includeTags?: string[]; +// excludeTags?: string[]; +// onlyMainContent?: boolean; +// timeout?: number; +// waitFor?: number; +// } + +export type ScrapeRequest = z.infer<typeof scrapeRequestSchema>; + +const crawlerOptions = z.object({ + includePaths: z.string().array().default([]), + 
excludePaths: z.string().array().default([]), + maxDepth: z.number().default(10), // default? + limit: z.number().default(10000), // default? + allowBackwardLinks: z.boolean().default(false), // >> TODO: CHANGE THIS NAME??? + allowExternalLinks: z.boolean().default(false), + ignoreSitemap: z.boolean().default(true), +}).strict(strictMessage); + +// export type CrawlerOptions = { +// includePaths?: string[]; +// excludePaths?: string[]; +// maxDepth?: number; +// limit?: number; +// allowBackwardLinks?: boolean; // >> TODO: CHANGE THIS NAME??? +// allowExternalLinks?: boolean; +// ignoreSitemap?: boolean; +// }; + +export type CrawlerOptions = z.infer<typeof crawlerOptions>; + +export const crawlRequestSchema = crawlerOptions.extend({ + url, + origin: z.string().optional().default("api"), + scrapeOptions: scrapeOptions.omit({ timeout: true }).default({}), + webhook: z.string().url().optional(), + limit: z.number().default(10000), +}).strict(strictMessage); + +// export type CrawlRequest = { +// url: string; +// crawlerOptions?: CrawlerOptions; +// scrapeOptions?: Exclude; +// }; + +// export type ExtractorOptions = { +// mode: "markdown" | "llm-extraction" | "llm-extraction-from-markdown" | "llm-extraction-from-raw-html"; +// extractionPrompt?: string; +// extractionSchema?: Record<string, any>; +// } + + +export type CrawlRequest = z.infer<typeof crawlRequestSchema>; + +export const mapRequestSchema = crawlerOptions.extend({ + url, + origin: z.string().optional().default("api"), + includeSubdomains: z.boolean().default(true), + search: z.string().optional(), + ignoreSitemap: z.boolean().default(false), + limit: z.number().min(1).max(5000).default(5000).optional(), +}).strict(strictMessage); + +// export type MapRequest = { +// url: string; +// crawlerOptions?: CrawlerOptions; +// }; + +export type MapRequest = z.infer<typeof mapRequestSchema>; + +export type Document = { + markdown?: string; + extract?: string; + html?: string; + rawHtml?: string; + links?: string[]; + screenshot?: string; + metadata: { + title?: string; + description?: string; + language?: string; + keywords?: string; + robots?: string; + ogTitle?: string; + ogDescription?: string; + ogUrl?: string; + ogImage?: string; + ogAudio?: string; + ogDeterminer?: string; + ogLocale?: string; + ogLocaleAlternate?: string[]; + ogSiteName?: string; + ogVideo?: string; + dcTermsCreated?: string; + dcDateCreated?: string; + dcDate?: string; + dcTermsType?: string; + dcType?: string; + dcTermsAudience?: string; + dcTermsSubject?: string; + dcSubject?: string; + dcDescription?: string; + dcTermsKeywords?: string; + modifiedTime?: string; + publishedTime?: string; + articleTag?: string; + articleSection?: string; + sourceURL?: string; + statusCode?: number; + error?: string; + }; +}; + +export type ErrorResponse = { + success: false; + error: string; + details?: any; +}; + +export type ScrapeResponse = + | ErrorResponse + | { + success: true; + warning?: string; + data: Document; + scrape_id?: string; + }; + +export interface ScrapeResponseRequestTest { + statusCode: number; + body: ScrapeResponse; + error?: string; +} + +export type CrawlResponse = + | ErrorResponse + | { + success: true; + id: string; + url: string; + }; + +export type MapResponse = + | ErrorResponse + | { + success: true; + links: string[]; + scrape_id?: string; + }; + +export type CrawlStatusParams = { + jobId: string; +}; + +export type CrawlStatusResponse = + | ErrorResponse + | { + status: "scraping" | "completed" | "failed" | "cancelled"; + completed: number; + total: number; + creditsUsed: number; + expiresAt: string; + next?: string; + data: 
Document[]; + }; + +type AuthObject = { + team_id: string; + plan: PlanType; +}; + +type Account = { + remainingCredits: number; +}; + +export interface RequestWithMaybeAuth< + ReqParams = {}, + ReqBody = undefined, + ResBody = undefined +> extends Request<ReqParams, ReqBody, ResBody> { + auth?: AuthObject; + account?: Account; +} + +export interface RequestWithAuth< + ReqParams = {}, + ReqBody = undefined, + ResBody = undefined, +> extends Request<ReqParams, ReqBody, ResBody> { + auth: AuthObject; + account?: Account; +} + +export interface ResponseWithSentry< + ResBody = undefined, +> extends Response<ResBody> { + sentry?: string, +} + +export function legacyCrawlerOptions(x: CrawlerOptions) { + return { + includes: x.includePaths, + excludes: x.excludePaths, + maxCrawledLinks: x.limit, + maxDepth: x.maxDepth, + limit: x.limit, + generateImgAltText: false, + allowBackwardCrawling: x.allowBackwardLinks, + allowExternalContentLinks: x.allowExternalLinks, + }; +} + +export function legacyScrapeOptions(x: ScrapeOptions): PageOptions { + return { + includeMarkdown: x.formats.includes("markdown"), + includeHtml: x.formats.includes("html"), + includeRawHtml: x.formats.includes("rawHtml"), + includeExtract: x.formats.includes("extract"), + onlyIncludeTags: x.includeTags, + removeTags: x.excludeTags, + onlyMainContent: x.onlyMainContent, + waitFor: x.waitFor, + includeLinks: x.formats.includes("links"), + screenshot: x.formats.includes("screenshot"), + fullPageScreenshot: x.formats.includes("screenshot@fullPage"), + parsePDF: x.parsePDF, + }; +} + +export function legacyExtractorOptions(x: ExtractOptions): ExtractorOptions { + return { + mode: x.mode ? "llm-extraction" : "markdown", + extractionPrompt: x.prompt ?? "Based on the information on the page, extract the information from the schema.", + extractionSchema: x.schema, + userPrompt: x.prompt ?? "", + }; +} + +export function legacyDocumentConverter(doc: any): Document { + if (doc === null || doc === undefined) return doc; + + if (doc.metadata) { + if (doc.metadata.screenshot) { + doc.screenshot = doc.metadata.screenshot; + delete doc.metadata.screenshot; + } + + if (doc.metadata.fullPageScreenshot) { + doc.fullPageScreenshot = doc.metadata.fullPageScreenshot; + delete doc.metadata.fullPageScreenshot; + } + } + + return { + markdown: doc.markdown, + links: doc.linksOnPage, + rawHtml: doc.rawHtml, + html: doc.html, + extract: doc.llm_extraction, + screenshot: doc.screenshot ?? 
doc.fullPageScreenshot, + metadata: { + ...doc.metadata, + pageError: undefined, + pageStatusCode: undefined, + error: doc.metadata.pageError, + statusCode: doc.metadata.pageStatusCode, + }, + }; +} diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index ebe6ef38..58370158 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,8 +1,10 @@ -import express from "express"; +import "dotenv/config"; +import "./services/sentry" +import * as Sentry from "@sentry/node"; +import express, { NextFunction, Request, Response } from "express"; import bodyParser from "body-parser"; import cors from "cors"; -import "dotenv/config"; -import { getWebScraperQueue } from "./services/queue-service"; +import { getScrapeQueue } from "./services/queue-service"; import { v0Router } from "./routes/v0"; import { initSDK } from "@hyperdx/node-opentelemetry"; import cluster from "cluster"; @@ -13,6 +15,12 @@ import { ScrapeEvents } from "./lib/scrape-events"; import http from 'node:http'; import https from 'node:https'; import CacheableLookup from 'cacheable-lookup'; +import { v1Router } from "./routes/v1"; +import expressWs from "express-ws"; +import { crawlStatusWSController } from "./controllers/v1/crawl-status-ws"; +import { ErrorResponse, ResponseWithSentry } from "./controllers/v1/types"; +import { ZodError } from "zod"; +import { v4 as uuidv4 } from "uuid"; const { createBullBoard } = require("@bull-board/api"); const { BullAdapter } = require("@bull-board/api/bullAdapter"); @@ -45,7 +53,8 @@ if (cluster.isMaster) { } }); } else { - const app = express(); + const ws = expressWs(express()); + const app = ws.app; global.isProduction = process.env.IS_PRODUCTION === "true"; @@ -58,7 +67,7 @@ if (cluster.isMaster) { serverAdapter.setBasePath(`/admin/${process.env.BULL_AUTH_KEY}/queues`); const { addQueue, removeQueue, setQueues, replaceQueues } = createBullBoard({ - queues: [new BullAdapter(getWebScraperQueue())], + queues: [new BullAdapter(getScrapeQueue())], serverAdapter: serverAdapter, }); @@ -78,6 +87,7 @@ if (cluster.isMaster) { // register router app.use(v0Router); + app.use("/v1", v1Router); app.use(adminRouter); const DEFAULT_PORT = process.env.PORT ?? 
3002; @@ -104,9 +114,9 @@ if (cluster.isMaster) { app.get(`/serverHealthCheck`, async (req, res) => { try { - const webScraperQueue = getWebScraperQueue(); + const scrapeQueue = getScrapeQueue(); const [waitingJobs] = await Promise.all([ - webScraperQueue.getWaitingCount(), + scrapeQueue.getWaitingCount(), ]); const noWaitingJobs = waitingJobs === 0; @@ -115,6 +125,7 @@ if (cluster.isMaster) { waitingJobs, }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } @@ -126,9 +137,9 @@ if (cluster.isMaster) { const timeout = 60000; // 1 minute // The timeout value for the check in milliseconds const getWaitingJobsCount = async () => { - const webScraperQueue = getWebScraperQueue(); + const scrapeQueue = getScrapeQueue(); const [waitingJobsCount] = await Promise.all([ - webScraperQueue.getWaitingCount(), + scrapeQueue.getWaitingCount(), ]); return waitingJobsCount; @@ -166,6 +177,7 @@ if (cluster.isMaster) { }, timeout); } } catch (error) { + Sentry.captureException(error); Logger.debug(error); } }; @@ -178,14 +190,46 @@ if (cluster.isMaster) { res.send({ isProduction: global.isProduction }); }); + app.use((err: unknown, req: Request<{}, ErrorResponse, undefined>, res: Response, next: NextFunction) => { + if (err instanceof ZodError) { + res.status(400).json({ success: false, error: "Bad Request", details: err.errors }); + } else { + next(err); + } + }); + + Sentry.setupExpressErrorHandler(app); + + app.use((err: unknown, req: Request<{}, ErrorResponse, undefined>, res: ResponseWithSentry, next: NextFunction) => { + const id = res.sentry ?? uuidv4(); + let verbose = JSON.stringify(err); + if (verbose === "{}") { + if (err instanceof Error) { + verbose = JSON.stringify({ + message: err.message, + name: err.name, + stack: err.stack, + }); + } + } + + Logger.error("Error occurred in request! (" + req.path + ") -- ID " + id + " -- " + verbose); + res.status(500).json({ success: false, error: "An unexpected error occurred. Please contact hello@firecrawl.com for help. 
Your exception ID is " + id }); + }); + Logger.info(`Worker ${process.pid} started`); } -const wsq = getWebScraperQueue(); -wsq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); -wsq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); -wsq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); -wsq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); -wsq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); -wsq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); + +// const sq = getScrapeQueue(); + +// sq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); +// sq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); +// sq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); +// sq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); +// sq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); +// sq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); + + + diff --git a/apps/api/src/lib/LLM-extraction/index.ts b/apps/api/src/lib/LLM-extraction/index.ts index 85a7e995..d05f9bd7 100644 --- a/apps/api/src/lib/LLM-extraction/index.ts +++ b/apps/api/src/lib/LLM-extraction/index.ts @@ -15,7 +15,8 @@ export async function generateCompletions( // const schema = zodToJsonSchema(options.schema) const schema = extractionOptions.extractionSchema; - const prompt = extractionOptions.extractionPrompt; + const systemPrompt = extractionOptions.extractionPrompt; + const prompt = extractionOptions.userPrompt; const switchVariable = "openAI"; // Placholder, want to think more about how we abstract the model provider @@ -24,30 +25,35 @@ export async function generateCompletions( switch (switchVariable) { case "openAI": const llm = new OpenAI(); - try{ - const completionResult = await generateOpenAICompletions({ - client: llm, - document: document, - schema: schema, - prompt: prompt, - mode: mode, - }); - // Validate the JSON output against the schema using AJV - const validate = ajv.compile(schema); - if (!validate(completionResult.llm_extraction)) { - //TODO: add Custom Error handling middleware that bubbles this up with proper Error code, etc. - throw new Error( - `JSON parsing error(s): ${validate.errors - ?.map((err) => err.message) - .join(", ")}\n\nLLM extraction did not match the extraction schema you provided. This could be because of a model hallucination, or an Error on our side. Try adjusting your prompt, and if it doesn't work reach out to support.` - ); - } + try { + const completionResult = await generateOpenAICompletions({ + client: llm, + document: document, + schema: schema, + prompt: prompt, + systemPrompt: systemPrompt, + mode: mode, + }); + // Validate the JSON output against the schema using AJV + if (schema) { + const validate = ajv.compile(schema); + if (!validate(completionResult.llm_extraction)) { + //TODO: add Custom Error handling middleware that bubbles this up with proper Error code, etc. + throw new Error( + `JSON parsing error(s): ${validate.errors + ?.map((err) => err.message) + .join( + ", " + )}\n\nLLM extraction did not match the extraction schema you provided. This could be because of a model hallucination, or an Error on our side. 
Try adjusting your prompt, and if it doesn't work reach out to support.` + ); + } + } - return completionResult; - } catch (error) { - Logger.error(`Error generating completions: ${error}`); - throw new Error(`Error generating completions: ${error.message}`); - } + return completionResult; + } catch (error) { + Logger.error(`Error generating completions: ${error}`); + throw error; + } default: throw new Error("Invalid client"); } diff --git a/apps/api/src/lib/LLM-extraction/models.ts b/apps/api/src/lib/LLM-extraction/models.ts index e696a8cd..23147b12 100644 --- a/apps/api/src/lib/LLM-extraction/models.ts +++ b/apps/api/src/lib/LLM-extraction/models.ts @@ -15,8 +15,7 @@ const defaultPrompt = function prepareOpenAIDoc( document: Document, mode: "markdown" | "raw-html" -): [OpenAI.Chat.Completions.ChatCompletionContentPart[], number] { - +): [OpenAI.Chat.Completions.ChatCompletionContentPart[], number] | null { let markdown = document.markdown; let extractionTarget = document.markdown; @@ -27,78 +26,120 @@ function prepareOpenAIDoc( // Check if the markdown content exists in the document if (!extractionTarget) { - throw new Error( - `${mode} content is missing in the document. This is likely due to an error in the scraping process. Please try again or reach out to help@mendable.ai` - ); + return null; + // throw new Error( + // `${mode} content is missing in the document. This is likely due to an error in the scraping process. Please try again or reach out to help@mendable.ai` + // ); } - - - // count number of tokens const numTokens = numTokensFromString(extractionTarget, "gpt-4"); if (numTokens > maxTokens) { // trim the document to the maximum number of tokens, tokens != characters - extractionTarget = extractionTarget.slice(0, (maxTokens * modifier)); + extractionTarget = extractionTarget.slice(0, maxTokens * modifier); } - return [[{ type: "text", text: extractionTarget }], numTokens]; } export async function generateOpenAICompletions({ client, - model = process.env.MODEL_NAME || "gpt-4o", + model = process.env.MODEL_NAME || "gpt-4o-mini", document, schema, //TODO - add zod dynamic type checking - prompt = defaultPrompt, + systemPrompt = defaultPrompt, + prompt, temperature, - mode + mode, }: { client: OpenAI; model?: string; document: Document; schema: any; // This should be replaced with a proper Zod schema type when available prompt?: string; + systemPrompt?: string; temperature?: number; mode: "markdown" | "raw-html"; }): Promise { const openai = client as OpenAI; - const [content, numTokens] = prepareOpenAIDoc(document, mode); + const preparedDoc = prepareOpenAIDoc(document, mode); - const completion = await openai.chat.completions.create({ - model, - messages: [ - { - role: "system", - content: prompt, - }, - { role: "user", content }, - ], - tools: [ - { - type: "function", - function: { - name: "extract_content", - description: "Extracts the content from the given webpage(s)", - parameters: schema, + if (preparedDoc === null) { + return { + ...document, + warning: + "LLM extraction was not performed since the document's content is empty or missing.", + }; + } + const [content, numTokens] = preparedDoc; + + let completion; + let llmExtraction; + if (prompt && !schema) { + const jsonCompletion = await openai.chat.completions.create({ + model, + messages: [ + { + role: "system", + content: systemPrompt, }, - }, - ], - tool_choice: { "type": "function", "function": {"name": "extract_content"}}, - temperature, - }); + { role: "user", content }, + { + role: "user", + content: 
`Transform the above content into structured json output based on the following user request: ${prompt}`, + }, + ], + response_format: { type: "json_object" }, + temperature, + }); - const c = completion.choices[0].message.tool_calls[0].function.arguments; + try { + llmExtraction = JSON.parse( + jsonCompletion.choices[0].message.content.trim() + ); + } catch (e) { + throw new Error("Invalid JSON"); + } + } else { + completion = await openai.chat.completions.create({ + model, + messages: [ + { + role: "system", + content: systemPrompt, + }, + { role: "user", content }, + ], + tools: [ + { + type: "function", + function: { + name: "extract_content", + description: "Extracts the content from the given webpage(s)", + parameters: schema, + }, + }, + ], + tool_choice: { type: "function", function: { name: "extract_content" } }, + temperature, + }); + const c = completion.choices[0].message.tool_calls[0].function.arguments; - // Extract the LLM extraction content from the completion response - const llmExtraction = JSON.parse(c); + // Extract the LLM extraction content from the completion response + try { + llmExtraction = JSON.parse(c); + } catch (e) { + throw new Error("Invalid JSON"); + } + } // Return the document with the LLM extraction content added return { ...document, llm_extraction: llmExtraction, - warning: numTokens > maxTokens ? `Page was trimmed to fit the maximum token limit defined by the LLM model (Max: ${maxTokens} tokens, Attemped: ${numTokens} tokens). If results are not good, email us at help@mendable.ai so we can help you.` : undefined, + warning: + numTokens > maxTokens + ? `Page was trimmed to fit the maximum token limit defined by the LLM model (Max: ${maxTokens} tokens, Attemped: ${numTokens} tokens). If results are not good, email us at help@mendable.ai so we can help you.` + : undefined, }; } - diff --git a/apps/api/src/lib/__tests__/job-priority.test.ts b/apps/api/src/lib/__tests__/job-priority.test.ts new file mode 100644 index 00000000..82477379 --- /dev/null +++ b/apps/api/src/lib/__tests__/job-priority.test.ts @@ -0,0 +1,134 @@ +import { + getJobPriority, + addJobPriority, + deleteJobPriority, +} from "../job-priority"; +import { redisConnection } from "../../services/queue-service"; +import { PlanType } from "../../types"; + +jest.mock("../../services/queue-service", () => ({ + redisConnection: { + sadd: jest.fn(), + srem: jest.fn(), + scard: jest.fn(), + expire: jest.fn(), + }, +})); + +describe("Job Priority Tests", () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + test("addJobPriority should add job_id to the set and set expiration", async () => { + const team_id = "team1"; + const job_id = "job1"; + await addJobPriority(team_id, job_id); + expect(redisConnection.sadd).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + job_id + ); + expect(redisConnection.expire).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + 60 + ); + }); + + test("deleteJobPriority should remove job_id from the set", async () => { + const team_id = "team1"; + const job_id = "job1"; + await deleteJobPriority(team_id, job_id); + expect(redisConnection.srem).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + job_id + ); + }); + + test("getJobPriority should return correct priority based on plan and set length", async () => { + const team_id = "team1"; + const plan: PlanType = "standard"; + (redisConnection.scard as jest.Mock).mockResolvedValue(150); + + const priority = await getJobPriority({ plan, team_id }); + expect(priority).toBe(10); + + (redisConnection.scard as 
jest.Mock).mockResolvedValue(250); + const priorityExceeded = await getJobPriority({ plan, team_id }); + expect(priorityExceeded).toBe(20); // basePriority + Math.ceil((250 - 200) * 0.2) + }); + + test("getJobPriority should handle different plans correctly", async () => { + const team_id = "team1"; + + (redisConnection.scard as jest.Mock).mockResolvedValue(50); + let plan: PlanType = "hobby"; + let priority = await getJobPriority({ plan, team_id }); + expect(priority).toBe(10); + + (redisConnection.scard as jest.Mock).mockResolvedValue(150); + plan = "hobby"; + priority = await getJobPriority({ plan, team_id }); + expect(priority).toBe(25); // basePriority + Math.ceil((150 - 100) * 0.3) + + (redisConnection.scard as jest.Mock).mockResolvedValue(25); + plan = "free"; + priority = await getJobPriority({ plan, team_id }); + expect(priority).toBe(10); + + (redisConnection.scard as jest.Mock).mockResolvedValue(60); + plan = "free"; + priority = await getJobPriority({ plan, team_id }); + expect(priority).toBe(28); // basePriority + Math.ceil((60 - 25) * 0.5) + }); + + test("addJobPriority should reset expiration time when adding new job", async () => { + const team_id = "team1"; + const job_id1 = "job1"; + const job_id2 = "job2"; + + await addJobPriority(team_id, job_id1); + expect(redisConnection.expire).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + 60 + ); + + // Clear the mock calls + (redisConnection.expire as jest.Mock).mockClear(); + + // Add another job + await addJobPriority(team_id, job_id2); + expect(redisConnection.expire).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + 60 + ); + }); + + test("Set should expire after 60 seconds", async () => { + const team_id = "team1"; + const job_id = "job1"; + + jest.useFakeTimers(); + + await addJobPriority(team_id, job_id); + expect(redisConnection.expire).toHaveBeenCalledWith( + `limit_team_id:${team_id}`, + 60 + ); + + // Fast-forward time by 59 seconds + jest.advanceTimersByTime(59000); + + // The set should still exist + expect(redisConnection.scard).not.toHaveBeenCalled(); + + // Fast-forward time by 2 more seconds (total 61 seconds) + jest.advanceTimersByTime(2000); + + // Check if the set has been removed (scard should return 0) + (redisConnection.scard as jest.Mock).mockResolvedValue(0); + const setSize = await redisConnection.scard(`limit_team_id:${team_id}`); + expect(setSize).toBe(0); + + jest.useRealTimers(); + }); +}); diff --git a/apps/api/src/lib/checkCredits.ts b/apps/api/src/lib/checkCredits.ts new file mode 100644 index 00000000..7e9d988d --- /dev/null +++ b/apps/api/src/lib/checkCredits.ts @@ -0,0 +1,32 @@ +import { checkTeamCredits } from "../services/billing/credit_billing"; +import { Logger } from "./logger"; + +type checkCreditsResponse = { + status: number; + error: string | null; +} + +export const checkCredits = async (team_id: string): Promise<checkCreditsResponse> => { + try { + const { + success: creditsCheckSuccess, + message: creditsCheckMessage + } = await checkTeamCredits(team_id, 1); + if (!creditsCheckSuccess) { + return { + status: 402, + error: "Insufficient credits" + }; + } + } catch (error) { + Logger.error(error); + return { + status: 500, + error: "Error checking team credits. Please contact hello@firecrawl.com for help." 
+ }; + } + return { + status: 200, + error: null + } +}; \ No newline at end of file diff --git a/apps/api/src/lib/crawl-redis.ts b/apps/api/src/lib/crawl-redis.ts new file mode 100644 index 00000000..9240018e --- /dev/null +++ b/apps/api/src/lib/crawl-redis.ts @@ -0,0 +1,124 @@ +import { WebCrawler } from "../scraper/WebScraper/crawler"; +import { redisConnection } from "../services/queue-service"; + +export type StoredCrawl = { + originUrl: string; + crawlerOptions: any; + pageOptions: any; + team_id: string; + plan: string; + robots?: string; + cancelled?: boolean; + createdAt: number; +}; + +export async function saveCrawl(id: string, crawl: StoredCrawl) { + await redisConnection.set("crawl:" + id, JSON.stringify(crawl)); + await redisConnection.expire("crawl:" + id, 24 * 60 * 60, "NX"); +} + +export async function getCrawl(id: string): Promise<StoredCrawl | null> { + const x = await redisConnection.get("crawl:" + id); + + if (x === null) { + return null; + } + + return JSON.parse(x); +} + +export async function getCrawlExpiry(id: string): Promise<Date> { + const d = new Date(); + const ttl = await redisConnection.pttl("crawl:" + id); + d.setMilliseconds(d.getMilliseconds() + ttl); + d.setMilliseconds(0); + return d; +} + +export async function addCrawlJob(id: string, job_id: string) { + await redisConnection.sadd("crawl:" + id + ":jobs", job_id); + await redisConnection.expire("crawl:" + id + ":jobs", 24 * 60 * 60, "NX"); +} + +export async function addCrawlJobs(id: string, job_ids: string[]) { + await redisConnection.sadd("crawl:" + id + ":jobs", ...job_ids); + await redisConnection.expire("crawl:" + id + ":jobs", 24 * 60 * 60, "NX"); +} + +export async function addCrawlJobDone(id: string, job_id: string) { + await redisConnection.sadd("crawl:" + id + ":jobs_done", job_id); + await redisConnection.lpush("crawl:" + id + ":jobs_done_ordered", job_id); + await redisConnection.expire("crawl:" + id + ":jobs_done", 24 * 60 * 60, "NX"); + await redisConnection.expire("crawl:" + id + ":jobs_done_ordered", 24 * 60 * 60, "NX"); +} + +export async function getDoneJobsOrderedLength(id: string): Promise<number> { + return await redisConnection.llen("crawl:" + id + ":jobs_done_ordered"); +} + +export async function getDoneJobsOrdered(id: string, start = 0, end = -1): Promise<string[]> { + return await redisConnection.lrange("crawl:" + id + ":jobs_done_ordered", start, end); +} + +export async function isCrawlFinished(id: string) { + return (await redisConnection.scard("crawl:" + id + ":jobs_done")) === (await redisConnection.scard("crawl:" + id + ":jobs")); +} + +export async function isCrawlFinishedLocked(id: string) { + return (await redisConnection.exists("crawl:" + id + ":finish")); +} + +export async function finishCrawl(id: string) { + if (await isCrawlFinished(id)) { + const set = await redisConnection.setnx("crawl:" + id + ":finish", "yes"); + if (set === 1) { + await redisConnection.expire("crawl:" + id + ":finish", 24 * 60 * 60); + } + return set === 1 + } +} + +export async function getCrawlJobs(id: string): Promise<string[]> { + return await redisConnection.smembers("crawl:" + id + ":jobs"); + } + +export async function lockURL(id: string, sc: StoredCrawl, url: string): Promise<boolean> { + if (typeof sc.crawlerOptions?.limit === "number") { + if (await redisConnection.scard("crawl:" + id + ":visited") >= sc.crawlerOptions.limit) { + return false; + } + } + const res = (await redisConnection.sadd("crawl:" + id + ":visited", url)) !== 0 + await redisConnection.expire("crawl:" + id + ":visited", 24 * 60 * 60, "NX"); + return res; +} + +/// NOTE: does 
not check limit. only use if limit is checked beforehand e.g. with sitemap +export async function lockURLs(id: string, urls: string[]): Promise { + const res = (await redisConnection.sadd("crawl:" + id + ":visited", ...urls)) !== 0 + await redisConnection.expire("crawl:" + id + ":visited", 24 * 60 * 60, "NX"); + return res; +} + +export function crawlToCrawler(id: string, sc: StoredCrawl): WebCrawler { + const crawler = new WebCrawler({ + jobId: id, + initialUrl: sc.originUrl, + includes: sc.crawlerOptions?.includes ?? [], + excludes: sc.crawlerOptions?.excludes ?? [], + maxCrawledLinks: sc.crawlerOptions?.maxCrawledLinks ?? 1000, + maxCrawledDepth: sc.crawlerOptions?.maxDepth ?? 10, + limit: sc.crawlerOptions?.limit ?? 10000, + generateImgAltText: sc.crawlerOptions?.generateImgAltText ?? false, + allowBackwardCrawling: sc.crawlerOptions?.allowBackwardCrawling ?? false, + allowExternalContentLinks: sc.crawlerOptions?.allowExternalContentLinks ?? false, + }); + + if (sc.robots !== undefined) { + try { + crawler.importRobotsTxt(sc.robots); + } catch (_) {} + } + + return crawler; +} diff --git a/apps/api/src/lib/custom-error.ts b/apps/api/src/lib/custom-error.ts index 20a01cb6..2ffe52e9 100644 --- a/apps/api/src/lib/custom-error.ts +++ b/apps/api/src/lib/custom-error.ts @@ -19,3 +19,4 @@ export class CustomError extends Error { Object.setPrototypeOf(this, CustomError.prototype); } } + diff --git a/apps/api/src/lib/default-values.ts b/apps/api/src/lib/default-values.ts index 152f47d7..f70f17c0 100644 --- a/apps/api/src/lib/default-values.ts +++ b/apps/api/src/lib/default-values.ts @@ -1,6 +1,6 @@ export const defaultOrigin = "api"; -export const defaultTimeout = 45000; // 45 seconds +export const defaultTimeout = 60000; // 60 seconds export const defaultPageOptions = { onlyMainContent: false, @@ -12,7 +12,8 @@ export const defaultPageOptions = { }; export const defaultCrawlerOptions = { - allowBackwardCrawling: false + allowBackwardCrawling: false, + limit: 10000 } export const defaultCrawlPageOptions = { diff --git a/apps/api/src/lib/entities.ts b/apps/api/src/lib/entities.ts index 922e4b6a..dfd17c63 100644 --- a/apps/api/src/lib/entities.ts +++ b/apps/api/src/lib/entities.ts @@ -11,6 +11,8 @@ export interface Progress { } export type PageOptions = { + includeMarkdown?: boolean; + includeExtract?: boolean; onlyMainContent?: boolean; includeHtml?: boolean; includeRawHtml?: boolean; @@ -24,12 +26,17 @@ export type PageOptions = { parsePDF?: boolean; removeTags?: string | string[]; onlyIncludeTags?: string | string[]; + includeLinks?: boolean; + useFastMode?: boolean; // beta + disableJSDom?: boolean; // beta + atsv?: boolean; // beta }; export type ExtractorOptions = { mode: "markdown" | "llm-extraction" | "llm-extraction-from-markdown" | "llm-extraction-from-raw-html"; extractionPrompt?: string; extractionSchema?: Record; + userPrompt?: string; } export type SearchOptions = { @@ -65,6 +72,8 @@ export type WebScraperOptions = { extractorOptions?: ExtractorOptions; concurrentRequests?: number; bullJobId?: string; + priority?: number; + teamId?: string; }; export interface DocumentUrl { @@ -141,4 +150,5 @@ export interface FireEngineOptions{ blockMedia?: boolean; blockAds?: boolean; disableJsDom?: boolean; + atsv?: boolean; // beta } diff --git a/apps/api/src/lib/html-to-markdown.ts b/apps/api/src/lib/html-to-markdown.ts index 233da921..002cb7be 100644 --- a/apps/api/src/lib/html-to-markdown.ts +++ b/apps/api/src/lib/html-to-markdown.ts @@ -1,5 +1,5 @@ -export function parseMarkdown(html: 
string) { +export async function parseMarkdown(html: string) { var TurndownService = require("turndown"); var turndownPluginGfm = require('joplin-turndown-plugin-gfm') @@ -21,7 +21,27 @@ export function parseMarkdown(html: string) { }); var gfm = turndownPluginGfm.gfm; turndownService.use(gfm); - let markdownContent = turndownService.turndown(html); + let markdownContent = ""; + const turndownPromise = new Promise((resolve, reject) => { + try { + const result = turndownService.turndown(html); + resolve(result); + } catch (error) { + reject("Error converting HTML to Markdown: " + error); + } + }); + + const timeoutPromise = new Promise((resolve, reject) => { + const timeout = 5000; // Timeout in milliseconds + setTimeout(() => reject("Conversion timed out after " + timeout + "ms"), timeout); + }); + + try { + markdownContent = await Promise.race([turndownPromise, timeoutPromise]); + } catch (error) { + console.error(error); + return ""; // Optionally return an empty string or handle the error as needed + } // multiple line links let insideLinkContent = false; diff --git a/apps/api/src/lib/job-priority.ts b/apps/api/src/lib/job-priority.ts new file mode 100644 index 00000000..bb6158f9 --- /dev/null +++ b/apps/api/src/lib/job-priority.ts @@ -0,0 +1,91 @@ +import { redisConnection } from "../../src/services/queue-service"; +import { PlanType } from "../../src/types"; +import { Logger } from "./logger"; + +const SET_KEY_PREFIX = "limit_team_id:"; +export async function addJobPriority(team_id, job_id) { + try { + const setKey = SET_KEY_PREFIX + team_id; + + // Add scrape job id to the set + await redisConnection.sadd(setKey, job_id); + + // This approach will reset the expiration time to 60 seconds every time a new job is added to the set. + await redisConnection.expire(setKey, 60); + } catch (e) { + Logger.error(`Add job priority (sadd) failed: ${team_id}, ${job_id}`); + } +} + +export async function deleteJobPriority(team_id, job_id) { + try { + const setKey = SET_KEY_PREFIX + team_id; + + // remove job_id from the set + await redisConnection.srem(setKey, job_id); + } catch (e) { + Logger.error(`Delete job priority (srem) failed: ${team_id}, ${job_id}`); + } +} + +export async function getJobPriority({ + plan, + team_id, + basePriority = 10, +}: { + plan: PlanType; + team_id: string; + basePriority?: number; +}): Promise<number> { + try { + const setKey = SET_KEY_PREFIX + team_id; + + // Get the length of the set + const setLength = await redisConnection.scard(setKey); + + // Determine the priority based on the plan and set length + let planModifier = 1; + let bucketLimit = 0; + + switch (plan) { + case "free": + bucketLimit = 25; + planModifier = 0.5; + break; + case "hobby": + bucketLimit = 100; + planModifier = 0.3; + break; + case "standard": + case "standardnew": + bucketLimit = 200; + planModifier = 0.2; + break; + case "growth": + case "growthdouble": + bucketLimit = 400; + planModifier = 0.1; + break; + + default: + bucketLimit = 25; + planModifier = 1; + break; + } + + // if the set length is within the bucket limit, just return the base priority + if (setLength <= bucketLimit) { + return basePriority; + } else { + // otherwise, add a plan-scaled penalty for every job over the bucket limit + return Math.ceil( + basePriority + Math.ceil((setLength - bucketLimit) * planModifier) + ); + } + } catch (e) { + Logger.error( + `Get job priority failed: ${team_id}, ${plan}, ${basePriority}` + ); + return basePriority; + } +} diff --git a/apps/api/src/lib/map-cosine.ts b/apps/api/src/lib/map-cosine.ts new file mode 100644 index 
00000000..db2491a9 --- /dev/null +++ b/apps/api/src/lib/map-cosine.ts @@ -0,0 +1,46 @@ +import { Logger } from "./logger"; + +export function performCosineSimilarity(links: string[], searchQuery: string) { + try { + // Function to calculate cosine similarity + const cosineSimilarity = (vec1: number[], vec2: number[]): number => { + const dotProduct = vec1.reduce((sum, val, i) => sum + val * vec2[i], 0); + const magnitude1 = Math.sqrt( + vec1.reduce((sum, val) => sum + val * val, 0) + ); + const magnitude2 = Math.sqrt( + vec2.reduce((sum, val) => sum + val * val, 0) + ); + if (magnitude1 === 0 || magnitude2 === 0) return 0; + return dotProduct / (magnitude1 * magnitude2); + }; + + // Function to convert text to vector + const textToVector = (text: string): number[] => { + const words = searchQuery.toLowerCase().split(/\W+/); + return words.map((word) => { + const count = (text.toLowerCase().match(new RegExp(word, "g")) || []) + .length; + return count / text.length; + }); + }; + + // Calculate similarity scores + const similarityScores = links.map((link) => { + const linkVector = textToVector(link); + const searchVector = textToVector(searchQuery); + return cosineSimilarity(linkVector, searchVector); + }); + + // Sort links based on similarity scores and print scores + const a = links + .map((link, index) => ({ link, score: similarityScores[index] })) + .sort((a, b) => b.score - a.score); + + links = a.map((item) => item.link); + return links; + } catch (error) { + Logger.error(`Error performing cosine similarity: ${error}`); + return links; + } +} diff --git a/apps/api/src/lib/scrape-events.ts b/apps/api/src/lib/scrape-events.ts index 02ef670b..ed011b78 100644 --- a/apps/api/src/lib/scrape-events.ts +++ b/apps/api/src/lib/scrape-events.ts @@ -1,4 +1,4 @@ -import { Job, JobId } from "bull"; +import { Job } from "bullmq"; import type { baseScrapers } from "../scraper/WebScraper/single_url"; import { supabase_service as supabase } from "../services/supabase"; import { Logger } from "./logger"; @@ -71,7 +71,7 @@ export class ScrapeEvents { } } - static async logJobEvent(job: Job | JobId, event: ScrapeQueueEvent["event"]) { + static async logJobEvent(job: Job | any, event: ScrapeQueueEvent["event"]) { try { await this.insert(((job as any).id ? 
(job as any).id : job) as string, { type: "queue", diff --git a/apps/api/src/lib/supabase-jobs.ts b/apps/api/src/lib/supabase-jobs.ts index 1f9531e5..cda6fd46 100644 --- a/apps/api/src/lib/supabase-jobs.ts +++ b/apps/api/src/lib/supabase-jobs.ts @@ -1,10 +1,12 @@ import { supabase_service } from "../services/supabase"; +import { Logger } from "./logger"; +import * as Sentry from "@sentry/node"; export const supabaseGetJobById = async (jobId: string) => { const { data, error } = await supabase_service - .from('firecrawl_jobs') - .select('*') - .eq('job_id', jobId) + .from("firecrawl_jobs") + .select("*") + .eq("job_id", jobId) .single(); if (error) { @@ -16,4 +18,41 @@ export const supabaseGetJobById = async (jobId: string) => { } return data; -} +}; + +export const supabaseGetJobsById = async (jobIds: string[]) => { + const { data, error } = await supabase_service.rpc("get_jobs_by_ids", { + job_ids: jobIds, + }); + + if (error) { + Logger.error(`Error in get_jobs_by_ids: ${error}`); + Sentry.captureException(error); + return []; + } + + if (!data) { + return []; + } + + return data; +}; + + +export const supabaseGetJobByIdOnlyData = async (jobId: string) => { + const { data, error } = await supabase_service + .from("firecrawl_jobs") + .select("docs, team_id") + .eq("job_id", jobId) + .single(); + + if (error) { + return null; + } + + if (!data) { + return null; + } + + return data; +}; \ No newline at end of file diff --git a/apps/api/src/lib/validateUrl.test.ts b/apps/api/src/lib/validateUrl.test.ts new file mode 100644 index 00000000..eec39f97 --- /dev/null +++ b/apps/api/src/lib/validateUrl.test.ts @@ -0,0 +1,159 @@ +import { isSameDomain, removeDuplicateUrls } from "./validateUrl"; +import { isSameSubdomain } from "./validateUrl"; + +describe("isSameDomain", () => { + it("should return true for a subdomain", () => { + const result = isSameDomain("http://sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return true for the same domain", () => { + const result = isSameDomain("http://example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return false for different domains", () => { + const result = isSameDomain("http://example.com", "http://another.com"); + expect(result).toBe(false); + }); + + it("should return true for a subdomain with different protocols", () => { + const result = isSameDomain("https://sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return false for invalid URLs", () => { + const result = isSameDomain("invalid-url", "http://example.com"); + expect(result).toBe(false); + const result2 = isSameDomain("http://example.com", "invalid-url"); + expect(result2).toBe(false); + }); + + it("should return true for a subdomain with www prefix", () => { + const result = isSameDomain("http://www.sub.example.com", "http://example.com"); + expect(result).toBe(true); + }); + + it("should return true for the same domain with www prefix", () => { + const result = isSameDomain("http://docs.s.s.example.com", "http://example.com"); + expect(result).toBe(true); + }); +}); + + + + +describe("isSameSubdomain", () => { + it("should return false for a subdomain", () => { + const result = isSameSubdomain("http://example.com", "http://docs.example.com"); + expect(result).toBe(false); + }); + + it("should return true for the same subdomain", () => { + const result = isSameSubdomain("http://docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should 
return false for different subdomains", () => { + const result = isSameSubdomain("http://docs.example.com", "http://blog.example.com"); + expect(result).toBe(false); + }); + + it("should return false for different domains", () => { + const result = isSameSubdomain("http://example.com", "http://another.com"); + expect(result).toBe(false); + }); + + it("should return false for invalid URLs", () => { + const result = isSameSubdomain("invalid-url", "http://example.com"); + expect(result).toBe(false); + const result2 = isSameSubdomain("http://example.com", "invalid-url"); + expect(result2).toBe(false); + }); + + it("should return true for the same subdomain with different protocols", () => { + const result = isSameSubdomain("https://docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should return true for the same subdomain with www prefix", () => { + const result = isSameSubdomain("http://www.docs.example.com", "http://docs.example.com"); + expect(result).toBe(true); + }); + + it("should return false for a subdomain with www prefix and different subdomain", () => { + const result = isSameSubdomain("http://www.docs.example.com", "http://blog.example.com"); + expect(result).toBe(false); + }); +}); + +describe("removeDuplicateUrls", () => { + it("should remove duplicate URLs with different protocols", () => { + const urls = [ + "http://example.com", + "https://example.com", + "http://www.example.com", + "https://www.example.com" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual(["https://example.com"]); + }); + + it("should keep URLs with different paths", () => { + const urls = [ + "https://example.com/page1", + "https://example.com/page2", + "https://example.com/page1?param=1", + "https://example.com/page1#section1" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual([ + "https://example.com/page1", + "https://example.com/page2", + "https://example.com/page1?param=1", + "https://example.com/page1#section1" + ]); + }); + + it("should prefer https over http", () => { + const urls = [ + "http://example.com", + "https://example.com" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual(["https://example.com"]); + }); + + it("should prefer non-www over www", () => { + const urls = [ + "https://www.example.com", + "https://example.com" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual(["https://example.com"]); + }); + + it("should handle empty input", () => { + const urls: string[] = []; + const result = removeDuplicateUrls(urls); + expect(result).toEqual([]); + }); + + it("should handle URLs with different cases", () => { + const urls = [ + "https://EXAMPLE.com", + "https://example.com" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual(["https://EXAMPLE.com"]); + }); + + it("should handle URLs with trailing slashes", () => { + const urls = [ + "https://example.com", + "https://example.com/" + ]; + const result = removeDuplicateUrls(urls); + expect(result).toEqual(["https://example.com"]); + }); +}); diff --git a/apps/api/src/lib/validateUrl.ts b/apps/api/src/lib/validateUrl.ts new file mode 100644 index 00000000..14a74de8 --- /dev/null +++ b/apps/api/src/lib/validateUrl.ts @@ -0,0 +1,170 @@ +export const protocolIncluded = (url: string) => { + // if :// not in the start of the url assume http (maybe https?) + // regex checks if :// appears before any . 
+ return /^([^.:]+:\/\/)/.test(url); +}; + +const getURLobj = (s: string) => { + // URL fails if we dont include the protocol ie google.com + let error = false; + let urlObj = {}; + try { + urlObj = new URL(s); + } catch (err) { + error = true; + } + return { error, urlObj }; +}; + +export const checkAndUpdateURL = (url: string) => { + if (!protocolIncluded(url)) { + url = `http://${url}`; + } + + const { error, urlObj } = getURLobj(url); + if (error) { + throw new Error("Invalid URL"); + } + + const typedUrlObj = urlObj as URL; + + if (typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { + throw new Error("Invalid URL"); + } + + return { urlObj: typedUrlObj, url: url }; +}; + +export const checkUrl = (url: string) => { + const { error, urlObj } = getURLobj(url); + if (error) { + throw new Error("Invalid URL"); + } + + const typedUrlObj = urlObj as URL; + + if (typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { + throw new Error("Invalid URL"); + } + + if ((url.split(".")[0].match(/:/g) || []).length !== 1) { + throw new Error("Invalid URL. Invalid protocol."); // for this one: http://http://example.com + } + + return url; +}; + +/** + * Same domain check + * It checks if the domain of the url is the same as the base url + * It accounts true for subdomains and www.subdomains + * @param url + * @param baseUrl + * @returns + */ +export function isSameDomain(url: string, baseUrl: string) { + const { urlObj: urlObj1, error: error1 } = getURLobj(url); + const { urlObj: urlObj2, error: error2 } = getURLobj(baseUrl); + + if (error1 || error2) { + return false; + } + + const typedUrlObj1 = urlObj1 as URL; + const typedUrlObj2 = urlObj2 as URL; + + const cleanHostname = (hostname: string) => { + return hostname.startsWith('www.') ? hostname.slice(4) : hostname; + }; + + const domain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(-2).join('.'); + const domain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(-2).join('.'); + + return domain1 === domain2; +} + + +export function isSameSubdomain(url: string, baseUrl: string) { + const { urlObj: urlObj1, error: error1 } = getURLobj(url); + const { urlObj: urlObj2, error: error2 } = getURLobj(baseUrl); + + if (error1 || error2) { + return false; + } + + const typedUrlObj1 = urlObj1 as URL; + const typedUrlObj2 = urlObj2 as URL; + + const cleanHostname = (hostname: string) => { + return hostname.startsWith('www.') ? 
hostname.slice(4) : hostname; + }; + + const domain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(-2).join('.'); + const domain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(-2).join('.'); + + const subdomain1 = cleanHostname(typedUrlObj1.hostname).split('.').slice(0, -2).join('.'); + const subdomain2 = cleanHostname(typedUrlObj2.hostname).split('.').slice(0, -2).join('.'); + + // Check if the domains are the same and the subdomains are the same + return domain1 === domain2 && subdomain1 === subdomain2; +} + + +export const checkAndUpdateURLForMap = (url: string) => { + if (!protocolIncluded(url)) { + url = `http://${url}`; + } + // remove last slash if present + if (url.endsWith("/")) { + url = url.slice(0, -1); + } + + + const { error, urlObj } = getURLobj(url); + if (error) { + throw new Error("Invalid URL"); + } + + const typedUrlObj = urlObj as URL; + + if (typedUrlObj.protocol !== "http:" && typedUrlObj.protocol !== "https:") { + throw new Error("Invalid URL"); + } + + // remove any query params + url = url.split("?")[0].trim(); + + return { urlObj: typedUrlObj, url: url }; +}; + + + + + +export function removeDuplicateUrls(urls: string[]): string[] { + const urlMap = new Map(); + + for (const url of urls) { + const parsedUrl = new URL(url); + const protocol = parsedUrl.protocol; + const hostname = parsedUrl.hostname.replace(/^www\./, ''); + const path = parsedUrl.pathname + parsedUrl.search + parsedUrl.hash; + + const key = `${hostname}${path}`; + + if (!urlMap.has(key)) { + urlMap.set(key, url); + } else { + const existingUrl = new URL(urlMap.get(key)!); + const existingProtocol = existingUrl.protocol; + + if (protocol === 'https:' && existingProtocol === 'http:') { + urlMap.set(key, url); + } else if (protocol === existingProtocol && !parsedUrl.hostname.startsWith('www.') && existingUrl.hostname.startsWith('www.')) { + urlMap.set(key, url); + } + } + } + + return [...new Set(Array.from(urlMap.values()))]; +} \ No newline at end of file diff --git a/apps/api/src/main/runWebScraper.ts b/apps/api/src/main/runWebScraper.ts index 3f3293b2..2268f9ed 100644 --- a/apps/api/src/main/runWebScraper.ts +++ b/apps/api/src/main/runWebScraper.ts @@ -1,4 +1,4 @@ -import { Job } from "bull"; +import { Job } from "bullmq"; import { CrawlResult, WebScraperOptions, @@ -15,15 +15,23 @@ import { ScrapeEvents } from "../lib/scrape-events"; export async function startWebScraperPipeline({ job, + token, }: { job: Job; + token: string; }) { let partialDocs: Document[] = []; return (await runWebScraper({ url: job.data.url, mode: job.data.mode, crawlerOptions: job.data.crawlerOptions, - pageOptions: job.data.pageOptions, + extractorOptions: job.data.extractorOptions, + pageOptions: { + ...job.data.pageOptions, + ...(job.data.crawl_id ? 
({ + includeRawHtml: true, + }): {}), + }, inProgress: (progress) => { Logger.debug(`🐂 Job in progress ${job.id}`); if (progress.currentDocument) { @@ -31,20 +39,22 @@ export async function startWebScraperPipeline({ if (partialDocs.length > 50) { partialDocs = partialDocs.slice(-50); } - job.progress({ ...progress, partialDocs: partialDocs }); + // job.updateProgress({ ...progress, partialDocs: partialDocs }); } }, - onSuccess: (result) => { + onSuccess: (result, mode) => { Logger.debug(`🐂 Job completed ${job.id}`); - saveJob(job, result); + saveJob(job, result, token, mode); }, onError: (error) => { Logger.error(`🐂 Job failed ${job.id}`); ScrapeEvents.logJobEvent(job, "failed"); - job.moveToFailed(error); + job.moveToFailed(error, token, false); }, team_id: job.data.team_id, bull_job_id: job.id.toString(), + priority: job.opts.priority, + is_scrape: job.data.is_scrape ?? false, })) as { success: boolean; message: string; docs: Document[] }; } export async function runWebScraper({ @@ -52,11 +62,14 @@ export async function runWebScraper({ mode, crawlerOptions, pageOptions, + extractorOptions, inProgress, onSuccess, onError, team_id, bull_job_id, + priority, + is_scrape=false, }: RunWebScraperParams): Promise { try { const provider = new WebScraperDataProvider(); @@ -65,17 +78,22 @@ export async function runWebScraper({ jobId: bull_job_id, mode: mode, urls: [url], + extractorOptions, crawlerOptions: crawlerOptions, pageOptions: pageOptions, bullJobId: bull_job_id, + priority, }); } else { await provider.setOptions({ jobId: bull_job_id, mode: mode, urls: url.split(","), + extractorOptions, crawlerOptions: crawlerOptions, pageOptions: pageOptions, + priority, + teamId: team_id }); } const docs = (await provider.getDocuments(false, (progress: Progress) => { @@ -97,21 +115,24 @@ export async function runWebScraper({ return { url: doc.metadata.sourceURL }; } }) - : docs.filter((doc) => doc.content.trim().length > 0); + : docs; - const billingResult = await billTeam(team_id, filteredDocs.length); - - if (!billingResult.success) { - // throw new Error("Failed to bill team, no subscription was found"); - return { - success: false, - message: "Failed to bill team, no subscription was found", - docs: [], - }; + if(is_scrape === false) { + const billingResult = await billTeam(team_id, filteredDocs.length); + if (!billingResult.success) { + // throw new Error("Failed to bill team, no subscription was found"); + return { + success: false, + message: "Failed to bill team, no subscription was found", + docs: [], + }; + } } + + // This is where the returnvalue from the job is set - onSuccess(filteredDocs); + onSuccess(filteredDocs, mode); // this return doesn't matter too much for the job completion result return { success: true, message: "", docs: filteredDocs }; @@ -121,7 +142,7 @@ export async function runWebScraper({ } } -const saveJob = async (job: Job, result: any) => { +const saveJob = async (job: Job, result: any, token: string, mode: string) => { try { const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; if (useDbAuthentication) { @@ -131,17 +152,21 @@ const saveJob = async (job: Job, result: any) => { .eq("job_id", job.id); if (error) throw new Error(error.message); - try { - await job.moveToCompleted(null, false, false); - } catch (error) { - // I think the job won't exist here anymore - } - } else { - try { - await job.moveToCompleted(result, false, false); - } catch (error) { - // I think the job won't exist here anymore - } + // try { + // if (mode === "crawl") { + // 
await job.moveToCompleted(null, token, false); + // } else { + // await job.moveToCompleted(result, token, false); + // } + // } catch (error) { + // // I think the job won't exist here anymore + // } + // } else { + // try { + // await job.moveToCompleted(result, token, false); + // } catch (error) { + // // I think the job won't exist here anymore + // } } ScrapeEvents.logJobEvent(job, "completed"); } catch (error) { diff --git a/apps/api/src/routes/admin.ts b/apps/api/src/routes/admin.ts index 77d1bf46..38611eac 100644 --- a/apps/api/src/routes/admin.ts +++ b/apps/api/src/routes/admin.ts @@ -1,10 +1,11 @@ import express from "express"; -import { redisHealthController } from "../controllers/admin/redis-health"; +import { redisHealthController } from "../controllers/v0/admin/redis-health"; import { + autoscalerController, checkQueuesController, cleanBefore24hCompleteJobsController, queuesController, -} from "../controllers/admin/queue"; +} from "../controllers/v0/admin/queue"; export const adminRouter = express.Router(); @@ -27,3 +28,8 @@ adminRouter.get( `/admin/${process.env.BULL_AUTH_KEY}/queues`, queuesController ); + +adminRouter.get( + `/admin/${process.env.BULL_AUTH_KEY}/autoscaler`, + autoscalerController +); diff --git a/apps/api/src/routes/v0.ts b/apps/api/src/routes/v0.ts index 9c68d9bb..3a7bda65 100644 --- a/apps/api/src/routes/v0.ts +++ b/apps/api/src/routes/v0.ts @@ -1,14 +1,14 @@ import express from "express"; -import { crawlController } from "../../src/controllers/crawl"; -import { crawlStatusController } from "../../src/controllers/crawl-status"; -import { scrapeController } from "../../src/controllers/scrape"; -import { crawlPreviewController } from "../../src/controllers/crawlPreview"; -import { crawlJobStatusPreviewController } from "../../src/controllers/status"; -import { searchController } from "../../src/controllers/search"; -import { crawlCancelController } from "../../src/controllers/crawl-cancel"; -import { keyAuthController } from "../../src/controllers/keyAuth"; -import { livenessController } from "../controllers/liveness"; -import { readinessController } from "../controllers/readiness"; +import { crawlController } from "../../src/controllers/v0/crawl"; +import { crawlStatusController } from "../../src/controllers/v0/crawl-status"; +import { scrapeController } from "../../src/controllers/v0/scrape"; +import { crawlPreviewController } from "../../src/controllers/v0/crawlPreview"; +import { crawlJobStatusPreviewController } from "../../src/controllers/v0/status"; +import { searchController } from "../../src/controllers/v0/search"; +import { crawlCancelController } from "../../src/controllers/v0/crawl-cancel"; +import { keyAuthController } from "../../src/controllers/v0/keyAuth"; +import { livenessController } from "../controllers/v0/liveness"; +import { readinessController } from "../controllers/v0/readiness"; export const v0Router = express.Router(); diff --git a/apps/api/src/routes/v1.ts b/apps/api/src/routes/v1.ts new file mode 100644 index 00000000..9dcbf111 --- /dev/null +++ b/apps/api/src/routes/v1.ts @@ -0,0 +1,156 @@ +import express, { NextFunction, Request, Response } from "express"; +import { crawlController } from "../controllers/v1/crawl"; +// import { crawlStatusController } from "../../src/controllers/v1/crawl-status"; +import { scrapeController } from "../../src/controllers/v1/scrape"; +import { crawlStatusController } from "../controllers/v1/crawl-status"; +import { mapController } from "../controllers/v1/map"; +import { ErrorResponse, 
RequestWithAuth, RequestWithMaybeAuth } from "../controllers/v1/types"; +import { RateLimiterMode } from "../types"; +import { authenticateUser } from "../controllers/auth"; +import { createIdempotencyKey } from "../services/idempotency/create"; +import { validateIdempotencyKey } from "../services/idempotency/validate"; +import { checkTeamCredits } from "../services/billing/credit_billing"; +import expressWs from "express-ws"; +import { crawlStatusWSController } from "../controllers/v1/crawl-status-ws"; +import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist"; +import { crawlCancelController } from "../controllers/v1/crawl-cancel"; +import { Logger } from "../lib/logger"; +import { scrapeStatusController } from "../controllers/v1/scrape-status"; +// import { crawlPreviewController } from "../../src/controllers/v1/crawlPreview"; +// import { crawlJobStatusPreviewController } from "../../src/controllers/v1/status"; +// import { searchController } from "../../src/controllers/v1/search"; +// import { crawlCancelController } from "../../src/controllers/v1/crawl-cancel"; +// import { keyAuthController } from "../../src/controllers/v1/keyAuth"; +// import { livenessController } from "../controllers/v1/liveness"; +// import { readinessController } from "../controllers/v1/readiness"; + +function checkCreditsMiddleware(minimum?: number): (req: RequestWithAuth, res: Response, next: NextFunction) => void { + return (req, res, next) => { + (async () => { + if (!minimum && req.body) { + minimum = (req.body as any)?.limit ?? 1; + } + const { success, message, remainingCredits } = await checkTeamCredits(req.auth.team_id, minimum); + if (!success) { + Logger.error(`Insufficient credits: ${JSON.stringify({ team_id: req.auth.team_id, minimum, remainingCredits })}`); + return res.status(402).json({ success: false, error: "Insufficient credits" }); + } + req.account = { remainingCredits } + next(); + })() + .catch(err => next(err)); + }; +} + +export function authMiddleware(rateLimiterMode: RateLimiterMode): (req: RequestWithMaybeAuth, res: Response, next: NextFunction) => void { + return (req, res, next) => { + (async () => { + const { success, team_id, error, status, plan } = await authenticateUser( + req, + res, + rateLimiterMode, + ); + + if (!success) { + return res.status(status).json({ success: false, error }); + } + + req.auth = { team_id, plan }; + next(); + })() + .catch(err => next(err)); + } +} + +function idempotencyMiddleware(req: Request, res: Response, next: NextFunction) { + (async () => { + if (req.headers["x-idempotency-key"]) { + const isIdempotencyValid = await validateIdempotencyKey(req); + if (!isIdempotencyValid) { + return res.status(409).json({ success: false, error: "Idempotency key already used" }); + } + createIdempotencyKey(req); + } + next(); + })() + .catch(err => next(err)); +} + +function blocklistMiddleware(req: Request, res: Response, next: NextFunction) { + if (req.body.url && isUrlBlocked(req.body.url)) { + return res.status(403).json({ success: false, error: "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." 
}); + } + next(); +} + +function wrap(controller: (req: Request, res: Response) => Promise): (req: Request, res: Response, next: NextFunction) => any { + return (req, res, next) => { + controller(req, res) + .catch(err => next(err)) + } +} + +expressWs(express()); + +export const v1Router = express.Router(); + +v1Router.post( + "/scrape", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Scrape), + checkCreditsMiddleware(1), + wrap(scrapeController) +); + +v1Router.post( + "/crawl", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Crawl), + idempotencyMiddleware, + checkCreditsMiddleware(), + wrap(crawlController) +); + +v1Router.post( + "/map", + blocklistMiddleware, + authMiddleware(RateLimiterMode.Map), + checkCreditsMiddleware(1), + wrap(mapController) +); + +v1Router.get( + "/crawl/:jobId", + authMiddleware(RateLimiterMode.CrawlStatus), + wrap(crawlStatusController) +); + +v1Router.get( + "/scrape/:jobId", + wrap(scrapeStatusController) +); + +v1Router.ws( + "/crawl/:jobId", + crawlStatusWSController +); + +// v1Router.post("/crawlWebsitePreview", crawlPreviewController); + + +v1Router.delete( + "/crawl/:jobId", + authMiddleware(RateLimiterMode.Crawl), + crawlCancelController +); +// v1Router.get("/checkJobStatus/:jobId", crawlJobStatusPreviewController); + +// // Auth route for key based authentication +// v1Router.get("/keyAuth", keyAuthController); + +// // Search routes +// v0Router.post("/search", searchController); + +// Health/Probe routes +// v1Router.get("/health/liveness", livenessController); +// v1Router.get("/health/readiness", readinessController); diff --git a/apps/api/src/run-req.ts b/apps/api/src/run-req.ts new file mode 100644 index 00000000..6d29916d --- /dev/null +++ b/apps/api/src/run-req.ts @@ -0,0 +1,175 @@ +import axios from "axios"; +import { promises as fs } from "fs"; +import { v4 as uuidV4 } from "uuid"; + +interface Result { + start_url: string; + job_id?: string; + idempotency_key?: string; + result_data_jsonb?: any; +} + +async function sendCrawl(result: Result): Promise { + const idempotencyKey = uuidV4(); + const url = result.start_url; + try { + const response = await axios.post( + "https://staging-firecrawl-scraper-js.fly.dev/v0/crawl", + { + url: url, + crawlerOptions: { + limit: 75, + }, + pageOptions: { + includeHtml: true, + replaceAllPathsWithAbsolutePaths: true, + waitFor: 1000, + }, + }, + { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer `, + }, + } + ); + result.idempotency_key = idempotencyKey; + return response.data.jobId; + } catch (error) { + console.error("Error sending crawl:", error); + return undefined; + } +} + +async function getContent(result: Result): Promise { + let attempts = 0; + while (attempts < 120) { + // Reduce the number of attempts to speed up + try { + const response = await axios.get( + `https://staging-firecrawl-scraper-js.fly.dev/v0/crawl/status/${result.job_id}`, + { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer `, + }, + } + ); + if (response.data.status === "completed") { + result.result_data_jsonb = response.data.data; + // Job actually completed + return true; + } + } catch (error) { + console.error("Error getting content:", error); + } + const randomSleep = Math.floor(Math.random() * 15000) + 5000; + await new Promise((resolve) => setTimeout(resolve, randomSleep)); // Reduce sleep time to 1.5 seconds + attempts++; + } + // Set result as null if timed out + result.result_data_jsonb = null; + return false; +} + +async function 
processResults(results: Result[]): Promise { + let processedCount = 0; + let starterCount = 0; + const queue: Result[] = []; + const processedUrls = new Set(); + + // Initialize the queue with the first 1000 results + for (let i = 0; i < Math.min(100, results.length); i++) { + queue.push(results[i]); + processedUrls.add(results[i].start_url); + } + + // Function to process a single result + const processSingleResult = async (result: Result) => { + const jobId = await sendCrawl(result); + if (jobId) { + console.log(`Job requested count: ${starterCount}`); + starterCount++; + result.job_id = jobId; + processedCount++; + // Save the result to the file + try { + // Save job id along with the start_url + const resultWithJobId = results.map(r => ({ + start_url: r.start_url, + job_id: r.job_id, + })); + await fs.writeFile( + "results_with_job_id_4000_6000.json", + JSON.stringify(resultWithJobId, null, 4) + ); + } catch (error) { + console.error("Error writing to results_with_content.json:", error); + } + + // Add a new result to the queue if there are more results to process + // if (processedCount < results.length) { + // for (let i = queue.length; i < results.length; i++) { + // if (!processedUrls.has(results[i].start_url)) { + // const nextResult = results[i]; + // console.log("Next result:", nextResult.start_url); + // queue.push(nextResult); + // processedUrls.add(nextResult.start_url); + // console.log(`Queue length: ${queue.length}`); + // processSingleResult(nextResult); + // break; + // } + // } + // } + } + }; + + // Start processing the initial queue concurrently + // for (let i = 0; i < queue.length; i++) { + // processSingleResult(queue[i]); + // if ((i + 1) % 500 === 0) { + // console.log(`Processed ${i + 1} results, waiting for 1 minute before adding the next batch...`); + // await new Promise(resolve => setTimeout(resolve, 60 * 1000)); // Wait for 1 minute + // } + // } + // Start processing the initial queue concurrently + // await Promise.all(queue.map(result => processSingleResult(result))); + for (let i = 0; i < results.length; i += 100) { + const batch = results.slice(i, i + 100); + Promise.all(batch.map((result) => processSingleResult(result))) + .then(() => { + console.log(`Processed ${i + 100} results.`); + }) + .catch((error) => { + console.error(`Error processing batch starting at index ${i}:`, error); + }); + await new Promise((resolve) => setTimeout(resolve, 60 * 1000)); // Wait for 1 minute + } +} + +// Example call + +async function getStartUrls(): Promise { + try { + const data = await fs.readFile("starturls.json", "utf-8"); + return JSON.parse(data); + } catch (error) { + console.error("Error reading starturls.json:", error); + return []; + } +} + +async function main() { + const results: Result[] = (await getStartUrls()).slice(3999, 6000); + // console.log(results.map((r) => r.start_url).slice(0, 3)); + + processResults(results) + .then(() => { + console.log("All results processed."); + }) + .catch((error) => { + console.error("Error processing results:", error); + }); +} + +main(); diff --git a/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts b/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts index d555e030..02c8a7e0 100644 --- a/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts +++ b/apps/api/src/scraper/WebScraper/__tests__/single_url.test.ts @@ -24,7 +24,7 @@ describe('scrapSingleUrl', () => { }); it('should return a list of links on the firecrawl.ai page', async () => { - const url = 'https://example.com'; + const url = 
'https://flutterbricks.com'; const pageOptions: PageOptions = { includeHtml: true }; const result = await scrapSingleUrl("TEST", url, pageOptions); @@ -33,5 +33,5 @@ it('should return a list of links on the firecrawl.ai page', async () => { expect(result.linksOnPage).toBeDefined(); expect(Array.isArray(result.linksOnPage)).toBe(true); expect(result.linksOnPage.length).toBeGreaterThan(0); - expect(result.linksOnPage).toContain('https://www.iana.org/domains/example') -}, 10000); + expect(result.linksOnPage).toContain('https://flutterbricks.com/features') +}, 15000); diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts index fc0eee3e..d5dadaf8 100644 --- a/apps/api/src/scraper/WebScraper/crawler.ts +++ b/apps/api/src/scraper/WebScraper/crawler.ts @@ -1,4 +1,4 @@ -import axios from "axios"; +import axios, { AxiosError } from "axios"; import cheerio, { load } from "cheerio"; import { URL } from "url"; import { getLinksFromSitemap } from "./sitemap"; @@ -22,7 +22,7 @@ export class WebCrawler { private crawledUrls: Map = new Map(); private limit: number; private robotsTxtUrl: string; - private robots: any; + public robots: any; private generateImgAltText: boolean; private allowBackwardCrawling: boolean; private allowExternalContentLinks: boolean; @@ -53,8 +53,8 @@ export class WebCrawler { this.jobId = jobId; this.initialUrl = initialUrl; this.baseUrl = new URL(initialUrl).origin; - this.includes = includes ?? []; - this.excludes = excludes ?? []; + this.includes = Array.isArray(includes) ? includes : []; + this.excludes = Array.isArray(excludes) ? excludes : []; this.limit = limit; this.robotsTxtUrl = `${this.baseUrl}/robots.txt`; this.robots = robotsParser(this.robotsTxtUrl, ""); @@ -66,10 +66,16 @@ export class WebCrawler { this.allowExternalContentLinks = allowExternalContentLinks ?? 
false; } - private filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] { + public filterLinks(sitemapLinks: string[], limit: number, maxDepth: number): string[] { return sitemapLinks .filter((link) => { - const url = new URL(link.trim(), this.baseUrl); + let url: URL; + try { + url = new URL(link.trim(), this.baseUrl); + } catch (error) { + Logger.debug(`Error processing link: ${link} | Error: ${error.message}`); + return false; + } const path = url.pathname; const depth = getURLDepth(url.toString()); @@ -102,7 +108,12 @@ export class WebCrawler { // Normalize the initial URL and the link to account for www and non-www versions const normalizedInitialUrl = new URL(this.initialUrl); - const normalizedLink = new URL(link); + let normalizedLink; + try { + normalizedLink = new URL(link); + } catch (_) { + return false; + } const initialHostname = normalizedInitialUrl.hostname.replace(/^www\./, ''); const linkHostname = normalizedLink.hostname.replace(/^www\./, ''); @@ -130,6 +141,25 @@ export class WebCrawler { .slice(0, limit); } + public async getRobotsTxt(): Promise { + const response = await axios.get(this.robotsTxtUrl, { timeout: axiosTimeout }); + return response.data; + } + + public importRobotsTxt(txt: string) { + this.robots = robotsParser(this.robotsTxtUrl, txt); + } + + public async tryGetSitemap(): Promise<{ url: string; html: string; }[] | null> { + Logger.debug(`Fetching sitemap links from ${this.initialUrl}`); + const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl); + if (sitemapLinks.length > 0) { + let filteredLinks = this.filterLinks(sitemapLinks, this.limit, this.maxCrawledDepth); + return filteredLinks.map(link => ({ url: link, html: "" })); + } + return null; + } + public async start( inProgress?: (progress: Progress) => void, pageOptions?: PageOptions, @@ -142,19 +172,17 @@ export class WebCrawler { Logger.debug(`Crawler starting with ${this.initialUrl}`); // Fetch and parse robots.txt try { - const response = await axios.get(this.robotsTxtUrl, { timeout: axiosTimeout }); - this.robots = robotsParser(this.robotsTxtUrl, response.data); + const txt = await this.getRobotsTxt(); + this.importRobotsTxt(txt); Logger.debug(`Crawler robots.txt fetched with ${this.robotsTxtUrl}`); } catch (error) { Logger.debug(`Failed to fetch robots.txt from ${this.robotsTxtUrl}`); } if (!crawlerOptions?.ignoreSitemap){ - Logger.debug(`Fetching sitemap links from ${this.initialUrl}`); - const sitemapLinks = await this.tryFetchSitemapLinks(this.initialUrl); - if (sitemapLinks.length > 0) { - let filteredLinks = this.filterLinks(sitemapLinks, limit, maxDepth); - return filteredLinks.map(link => ({ url: link, html: "" })); + const sm = await this.tryGetSitemap(); + if (sm !== null) { + return sm; } } @@ -241,6 +269,63 @@ export class WebCrawler { return Array.from(this.crawledUrls.entries()).map(([url, html]) => ({ url, html })); } + public filterURL(href: string, url: string): string | null { + let fullUrl = href; + if (!href.startsWith("http")) { + try { + fullUrl = new URL(href, this.baseUrl).toString(); + } catch (_) { + return null; + } + } + let urlObj; + try { + urlObj = new URL(fullUrl); + } catch (_) { + return null; + } + const path = urlObj.pathname; + + if (this.isInternalLink(fullUrl)) { // INTERNAL LINKS + if (this.isInternalLink(fullUrl) && + this.noSections(fullUrl) && + !this.matchesExcludes(path) && + this.isRobotsAllowed(fullUrl) + ) { + return fullUrl; + } + } else { // EXTERNAL LINKS + if ( + this.isInternalLink(url) && + 
this.allowExternalContentLinks && + !this.isSocialMediaOrEmail(fullUrl) && + !this.matchesExcludes(fullUrl, true) && + !this.isExternalMainPage(fullUrl) + ) { + return fullUrl; + } + } + + return null; + } + + public extractLinksFromHTML(html: string, url: string) { + let links: string[] = []; + + const $ = load(html); + $("a").each((_, element) => { + const href = $(element).attr("href"); + if (href) { + const u = this.filterURL(href, url); + if (u !== null) { + links.push(u); + } + } + }); + + return links; + } + async crawl(url: string, pageOptions: PageOptions): Promise<{url: string, html: string, pageStatusCode?: number, pageError?: string}[]> { if (this.visited.has(url) || !this.robots.isAllowed(url, "FireCrawlAgent")) { return []; @@ -284,37 +369,7 @@ export class WebCrawler { links.push({ url, html: content, pageStatusCode, pageError }); } - $("a").each((_, element) => { - const href = $(element).attr("href"); - if (href) { - let fullUrl = href; - if (!href.startsWith("http")) { - fullUrl = new URL(href, this.baseUrl).toString(); - } - const urlObj = new URL(fullUrl); - const path = urlObj.pathname; - - if (this.isInternalLink(fullUrl)) { // INTERNAL LINKS - if (this.isInternalLink(fullUrl) && - this.noSections(fullUrl) && - !this.matchesExcludes(path) && - this.isRobotsAllowed(fullUrl) - ) { - links.push({ url: fullUrl, html: content, pageStatusCode, pageError }); - } - } else { // EXTERNAL LINKS - if ( - this.isInternalLink(url) && - this.allowExternalContentLinks && - !this.isSocialMediaOrEmail(fullUrl) && - !this.matchesExcludes(fullUrl, true) && - !this.isExternalMainPage(fullUrl) - ) { - links.push({ url: fullUrl, html: content, pageStatusCode, pageError }); - } - } - } - }); + links.push(...this.extractLinksFromHTML(content, url).map(url => ({ url, html: content, pageStatusCode, pageError }))); if (this.visited.size === 1) { return links; @@ -465,9 +520,13 @@ export class WebCrawler { } } catch (error) { Logger.debug(`Failed to fetch sitemap with axios from ${sitemapUrl}: ${error}`); - const response = await getLinksFromSitemap({ sitemapUrl, mode: 'fire-engine' }); - if (response) { - sitemapLinks = response; + if (error instanceof AxiosError && error.response?.status === 404) { + // ignore 404 + } else { + const response = await getLinksFromSitemap({ sitemapUrl, mode: 'fire-engine' }); + if (response) { + sitemapLinks = response; + } } } @@ -480,7 +539,11 @@ export class WebCrawler { } } catch (error) { Logger.debug(`Failed to fetch sitemap from ${baseUrlSitemap}: ${error}`); - sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap, mode: 'fire-engine' }); + if (error instanceof AxiosError && error.response?.status === 404) { + // ignore 404 + } else { + sitemapLinks = await getLinksFromSitemap({ sitemapUrl: baseUrlSitemap, mode: 'fire-engine' }); + } } } diff --git a/apps/api/src/scraper/WebScraper/index.ts b/apps/api/src/scraper/WebScraper/index.ts index 859127bd..fc828224 100644 --- a/apps/api/src/scraper/WebScraper/index.ts +++ b/apps/api/src/scraper/WebScraper/index.ts @@ -16,7 +16,6 @@ import { replacePathsWithAbsolutePaths, } from "./utils/replacePaths"; import { generateCompletions } from "../../lib/LLM-extraction"; -import { getWebScraperQueue } from "../../../src/services/queue-service"; import { fetchAndProcessDocx } from "./utils/docxProcessor"; import { getAdjustedMaxDepth, getURLDepth } from "./utils/maxDepthUtils"; import { Logger } from "../../lib/logger"; @@ -44,6 +43,8 @@ export class WebScraperDataProvider { private crawlerMode: string = 
"default"; private allowBackwardCrawling: boolean = false; private allowExternalContentLinks: boolean = false; + private priority?: number; + private teamId?: string; authorize(): void { throw new Error("Method not implemented."); @@ -72,7 +73,9 @@ export class WebScraperDataProvider { url, this.pageOptions, this.extractorOptions, - existingHTML + existingHTML, + this.priority, + this.teamId, ); processedUrls++; if (inProgress) { @@ -88,21 +91,6 @@ export class WebScraperDataProvider { results[i + index] = result; }) ); - try { - if (this.mode === "crawl" && this.bullJobId) { - const job = await getWebScraperQueue().getJob(this.bullJobId); - const jobStatus = await job.getState(); - if (jobStatus === "failed") { - Logger.info( - "Job has failed or has been cancelled by the user. Stopping the job..." - ); - return [] as Document[]; - } - } - } catch (error) { - Logger.error(error.message); - return [] as Document[]; - } } return results.filter((result) => result !== null) as Document[]; } @@ -306,28 +294,32 @@ export class WebScraperDataProvider { documents = await this.getSitemapData(this.urls[0], documents); } - documents = this.applyPathReplacements(documents); - // documents = await this.applyImgAltText(documents); - if ( - (this.extractorOptions.mode === "llm-extraction" || - this.extractorOptions.mode === "llm-extraction-from-markdown") && - this.mode === "single_urls" - ) { - documents = await generateCompletions( - documents, - this.extractorOptions, - "markdown" - ); + if (this.pageOptions.includeMarkdown) { + documents = this.applyPathReplacements(documents); } - if ( - this.extractorOptions.mode === "llm-extraction-from-raw-html" && - this.mode === "single_urls" - ) { - documents = await generateCompletions( - documents, - this.extractorOptions, - "raw-html" - ); + + if (!this.pageOptions.includeHtml) { + for (let document of documents) { + delete document.html; + } + } + + // documents = await this.applyImgAltText(documents); + if (this.mode === "single_urls" && this.pageOptions.includeExtract) { + const extractionMode = this.extractorOptions?.mode ?? "markdown"; + const completionMode = extractionMode === "llm-extraction-from-raw-html" ? "raw-html" : "markdown"; + + if ( + extractionMode === "llm-extraction" || + extractionMode === "llm-extraction-from-markdown" || + extractionMode === "llm-extraction-from-raw-html" + ) { + documents = await generateCompletions( + documents, + this.extractorOptions, + completionMode + ); + } } return documents.concat(pdfDocuments).concat(docxDocuments); } @@ -359,6 +351,7 @@ export class WebScraperDataProvider { }); return { content: content, + markdown: content, metadata: { sourceURL: pdfLink, pageStatusCode, pageError }, provider: "web-scraper", }; @@ -581,12 +574,21 @@ export class WebScraperDataProvider { this.limit = options.crawlerOptions?.limit ?? 10000; this.generateImgAltText = options.crawlerOptions?.generateImgAltText ?? false; - this.pageOptions = options.pageOptions ?? { - onlyMainContent: false, - includeHtml: false, - replaceAllPathsWithAbsolutePaths: false, - parsePDF: true, - removeTags: [], + this.pageOptions = { + onlyMainContent: options.pageOptions?.onlyMainContent ?? false, + includeHtml: options.pageOptions?.includeHtml ?? false, + replaceAllPathsWithAbsolutePaths: options.pageOptions?.replaceAllPathsWithAbsolutePaths ?? true, + parsePDF: options.pageOptions?.parsePDF ?? true, + onlyIncludeTags: options.pageOptions?.onlyIncludeTags ?? [], + removeTags: options.pageOptions?.removeTags ?? 
[], + includeMarkdown: options.pageOptions?.includeMarkdown ?? true, + includeRawHtml: options.pageOptions?.includeRawHtml ?? false, + includeExtract: options.pageOptions?.includeExtract ?? (options.extractorOptions?.mode && options.extractorOptions?.mode !== "markdown") ?? false, + waitFor: options.pageOptions?.waitFor ?? undefined, + headers: options.pageOptions?.headers ?? undefined, + includeLinks: options.pageOptions?.includeLinks ?? true, + fullPageScreenshot: options.pageOptions?.fullPageScreenshot ?? false, + screenshot: options.pageOptions?.screenshot ?? false, }; this.extractorOptions = options.extractorOptions ?? { mode: "markdown" }; this.replaceAllPathsWithAbsolutePaths = @@ -608,6 +610,10 @@ export class WebScraperDataProvider { options.crawlerOptions?.allowBackwardCrawling ?? false; this.allowExternalContentLinks = options.crawlerOptions?.allowExternalContentLinks ?? false; + this.priority = options.priority; + this.teamId = options.teamId ?? null; + + // make sure all urls start with https:// this.urls = this.urls.map((url) => { diff --git a/apps/api/src/scraper/WebScraper/scrapers/fireEngine.ts b/apps/api/src/scraper/WebScraper/scrapers/fireEngine.ts index 0bb9986f..aa86ad5e 100644 --- a/apps/api/src/scraper/WebScraper/scrapers/fireEngine.ts +++ b/apps/api/src/scraper/WebScraper/scrapers/fireEngine.ts @@ -5,6 +5,7 @@ import { generateRequestParams } from "../single_url"; import { fetchAndProcessPdf } from "../utils/pdfProcessor"; import { universalTimeout } from "../global"; import { Logger } from "../../../lib/logger"; +import * as Sentry from "@sentry/node"; /** * Scrapes a URL with Fire-Engine @@ -22,19 +23,23 @@ export async function scrapWithFireEngine({ waitFor = 0, screenshot = false, fullPageScreenshot = false, - pageOptions = { parsePDF: true }, + pageOptions = { parsePDF: true, atsv: false, useFastMode: false, disableJsDom: false }, fireEngineOptions = {}, headers, options, + priority, + teamId, }: { url: string; waitFor?: number; screenshot?: boolean; fullPageScreenshot?: boolean; - pageOptions?: { scrollXPaths?: string[]; parsePDF?: boolean }; + pageOptions?: { scrollXPaths?: string[]; parsePDF?: boolean, atsv?: boolean, useFastMode?: boolean, disableJsDom?: boolean }; fireEngineOptions?: FireEngineOptions; headers?: Record; options?: any; + priority?: number; + teamId?: string; }): Promise { const logParams = { url, @@ -49,11 +54,11 @@ export async function scrapWithFireEngine({ try { const reqParams = await generateRequestParams(url); - const waitParam = reqParams["params"]?.wait ?? waitFor; - const engineParam = reqParams["params"]?.engine ?? reqParams["params"]?.fireEngineOptions?.engine ?? fireEngineOptions?.engine ?? "playwright"; - const screenshotParam = reqParams["params"]?.screenshot ?? screenshot; - const fullPageScreenshotParam = reqParams["params"]?.fullPageScreenshot ?? fullPageScreenshot; - const fireEngineOptionsParam : FireEngineOptions = reqParams["params"]?.fireEngineOptions ?? fireEngineOptions; + let waitParam = reqParams["params"]?.wait ?? waitFor; + let engineParam = reqParams["params"]?.engine ?? reqParams["params"]?.fireEngineOptions?.engine ?? fireEngineOptions?.engine ?? "playwright"; + let screenshotParam = reqParams["params"]?.screenshot ?? screenshot; + let fullPageScreenshotParam = reqParams["params"]?.fullPageScreenshot ?? fullPageScreenshot; + let fireEngineOptionsParam : FireEngineOptions = reqParams["params"]?.fireEngineOptions ?? 
fireEngineOptions; let endpoint = "/scrape"; @@ -68,47 +73,101 @@ export async function scrapWithFireEngine({ `⛏️ Fire-Engine (${engine}): Scraping ${url} | params: { wait: ${waitParam}, screenshot: ${screenshotParam}, fullPageScreenshot: ${fullPageScreenshot}, method: ${fireEngineOptionsParam?.method ?? "null"} }` ); + if (pageOptions?.useFastMode) { + fireEngineOptionsParam.engine = "tlsclient"; + engine = "tlsclient"; + } - const response = await axios.post( - process.env.FIRE_ENGINE_BETA_URL + endpoint, - { - url: url, - wait: waitParam, - screenshot: screenshotParam, - fullPageScreenshot: fullPageScreenshotParam, - headers: headers, - pageOptions: pageOptions, - ...fireEngineOptionsParam, - }, - { - headers: { - "Content-Type": "application/json", + // atsv is only available for beta customers + const betaCustomersString = process.env.BETA_CUSTOMERS; + const betaCustomers = betaCustomersString ? betaCustomersString.split(",") : []; + + if (pageOptions?.atsv && betaCustomers.includes(teamId)) { + fireEngineOptionsParam.atsv = true; + } else { + pageOptions.atsv = false; + } + + const axiosInstance = axios.create({ + headers: { "Content-Type": "application/json" } + }); + + const startTime = Date.now(); + const _response = await Sentry.startSpan({ + name: "Call to fire-engine" + }, async span => { + return await axiosInstance.post( + process.env.FIRE_ENGINE_BETA_URL + endpoint, + { + url: url, + wait: waitParam, + screenshot: screenshotParam, + fullPageScreenshot: fullPageScreenshotParam, + headers: headers, + pageOptions: pageOptions, + disableJsDom: pageOptions?.disableJsDom ?? false, + priority, + engine, + instantReturn: true, + ...fireEngineOptionsParam, }, - timeout: universalTimeout + waitParam, - } - ); + { + headers: { + "Content-Type": "application/json", + ...(Sentry.isInitialized() ? ({ + "sentry-trace": Sentry.spanToTraceHeader(span), + "baggage": Sentry.spanToBaggageHeader(span), + }) : {}), + } + } + ); + }); - if (response.status !== 200) { + let checkStatusResponse = await axiosInstance.get(`${process.env.FIRE_ENGINE_BETA_URL}/scrape/${_response.data.jobId}`); + while (checkStatusResponse.data.processing && Date.now() - startTime < universalTimeout + waitParam) { + await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second + checkStatusResponse = await axiosInstance.get(`${process.env.FIRE_ENGINE_BETA_URL}/scrape/${_response.data.jobId}`); + } + + if (checkStatusResponse.data.processing) { + Logger.debug(`⛏️ Fire-Engine (${engine}): deleting request - jobId: ${_response.data.jobId}`); + axiosInstance.delete( + process.env.FIRE_ENGINE_BETA_URL + `/scrape/${_response.data.jobId}`, { + validateStatus: (status) => true + } + ).catch((error) => { + Logger.debug(`⛏️ Fire-Engine (${engine}): Failed to delete request - jobId: ${_response.data.jobId} | error: ${error}`); + }); + + Logger.debug(`⛏️ Fire-Engine (${engine}): Request timed out for ${url}`); + logParams.error_message = "Request timed out"; + return { html: "", screenshot: "", pageStatusCode: null, pageError: "" }; + } + + if (checkStatusResponse.status !== 200 || checkStatusResponse.data.error) { Logger.debug( - `⛏️ Fire-Engine (${engine}): Failed to fetch url: ${url} \t status: ${response.status}` + `⛏️ Fire-Engine (${engine}): Failed to fetch url: ${url} \t status: ${checkStatusResponse.status}` ); - logParams.error_message = response.data?.pageError; - logParams.response_code = response.data?.pageStatusCode; + logParams.error_message = checkStatusResponse.data?.pageError ?? 
checkStatusResponse.data?.error; + logParams.response_code = checkStatusResponse.data?.pageStatusCode; - if(response.data && response.data?.pageStatusCode !== 200) { - Logger.debug(`⛏️ Fire-Engine (${engine}): Failed to fetch url: ${url} \t status: ${response.status}`); + if(checkStatusResponse.data && checkStatusResponse.data?.pageStatusCode !== 200) { + Logger.debug(`⛏️ Fire-Engine (${engine}): Failed to fetch url: ${url} \t status: ${checkStatusResponse.data?.pageStatusCode}`); } + const pageStatusCode = checkStatusResponse.data?.pageStatusCode ? checkStatusResponse.data?.pageStatusCode : checkStatusResponse.data?.error && checkStatusResponse.data?.error.includes("Dns resolution error for hostname") ? 404 : undefined; + return { html: "", screenshot: "", - pageStatusCode: response.data?.pageStatusCode, - pageError: response.data?.pageError, + pageStatusCode, + pageError: checkStatusResponse.data?.pageError ?? checkStatusResponse.data?.error, }; } - const contentType = response.headers["content-type"]; + const contentType = checkStatusResponse.data.responseHeaders?.["content-type"]; + if (contentType && contentType.includes("application/pdf")) { const { content, pageStatusCode, pageError } = await fetchAndProcessPdf( url, @@ -119,18 +178,19 @@ export async function scrapWithFireEngine({ logParams.error_message = pageError; return { html: content, screenshot: "", pageStatusCode, pageError }; } else { - const data = response.data; + const data = checkStatusResponse.data; + logParams.success = (data.pageStatusCode >= 200 && data.pageStatusCode < 300) || data.pageStatusCode === 404; logParams.html = data.content ?? ""; logParams.response_code = data.pageStatusCode; - logParams.error_message = data.pageError; + logParams.error_message = data.pageError ?? data.error; return { html: data.content ?? "", screenshot: data.screenshot ?? "", pageStatusCode: data.pageStatusCode, - pageError: data.pageError, + pageError: data.pageError ?? data.error, }; } } catch (error) { diff --git a/apps/api/src/scraper/WebScraper/scrapers/scrapingBee.ts b/apps/api/src/scraper/WebScraper/scrapers/scrapingBee.ts index 554bfe22..b72fa8b2 100644 --- a/apps/api/src/scraper/WebScraper/scrapers/scrapingBee.ts +++ b/apps/api/src/scraper/WebScraper/scrapers/scrapingBee.ts @@ -43,6 +43,9 @@ export async function scrapWithScrapingBee( transparent_status_code: "True", }, }); + Logger.info( + `⛏️ ScrapingBee: Scraping ${url}` + ); const contentType = response.headers["content-type"]; if (contentType && contentType.includes("application/pdf")) { logParams.success = true; diff --git a/apps/api/src/scraper/WebScraper/single_url.ts b/apps/api/src/scraper/WebScraper/single_url.ts index d5bdcffe..11e1fe37 100644 --- a/apps/api/src/scraper/WebScraper/single_url.ts +++ b/apps/api/src/scraper/WebScraper/single_url.ts @@ -27,8 +27,8 @@ const useScrapingBee = process.env.SCRAPING_BEE_API_KEY !== '' && process.env.SC const useFireEngine = process.env.FIRE_ENGINE_BETA_URL !== '' && process.env.FIRE_ENGINE_BETA_URL !== undefined; export const baseScrapers = [ - useFireEngine ? "fire-engine" : undefined, useFireEngine ? "fire-engine;chrome-cdp" : undefined, + useFireEngine ? "fire-engine" : undefined, useScrapingBee ? "scrapingBee" : undefined, useFireEngine ? undefined : "playwright", useScrapingBee ? "scrapingBeeLoad" : undefined, @@ -88,8 +88,8 @@ function getScrapingFallbackOrder( }); let defaultOrder = [ - useFireEngine ? "fire-engine" : undefined, useFireEngine ? "fire-engine;chrome-cdp" : undefined, + useFireEngine ? 
"fire-engine" : undefined, useScrapingBee ? "scrapingBee" : undefined, useScrapingBee ? "scrapingBeeLoad" : undefined, useFireEngine ? undefined : "playwright", @@ -125,20 +125,39 @@ function getScrapingFallbackOrder( export async function scrapSingleUrl( jobId: string, urlToScrap: string, - pageOptions: PageOptions = { - onlyMainContent: true, - includeHtml: false, - includeRawHtml: false, - waitFor: 0, - screenshot: false, - fullPageScreenshot: false, - headers: undefined, - }, - extractorOptions: ExtractorOptions = { - mode: "llm-extraction-from-markdown", - }, - existingHtml: string = "" + pageOptions: PageOptions, + extractorOptions?: ExtractorOptions, + existingHtml?: string, + priority?: number, + teamId?: string ): Promise { + pageOptions = { + includeMarkdown: pageOptions.includeMarkdown ?? true, + includeExtract: pageOptions.includeExtract ?? false, + onlyMainContent: pageOptions.onlyMainContent ?? false, + includeHtml: pageOptions.includeHtml ?? false, + includeRawHtml: pageOptions.includeRawHtml ?? false, + waitFor: pageOptions.waitFor ?? undefined, + screenshot: pageOptions.screenshot ?? false, + fullPageScreenshot: pageOptions.fullPageScreenshot ?? false, + headers: pageOptions.headers ?? undefined, + includeLinks: pageOptions.includeLinks ?? true, + replaceAllPathsWithAbsolutePaths: pageOptions.replaceAllPathsWithAbsolutePaths ?? true, + parsePDF: pageOptions.parsePDF ?? true, + removeTags: pageOptions.removeTags ?? [], + onlyIncludeTags: pageOptions.onlyIncludeTags ?? [], + } + + if (extractorOptions) { + extractorOptions = { + mode: extractorOptions?.mode ?? "llm-extraction-from-markdown", + } + } + + if (!existingHtml) { + existingHtml = ""; + } + urlToScrap = urlToScrap.trim(); const attemptScraping = async ( @@ -166,7 +185,7 @@ export async function scrapSingleUrl( case "fire-engine;chrome-cdp": let engine: "playwright" | "chrome-cdp" | "tlsclient" = "playwright"; - if(method === "fire-engine;chrome-cdp"){ + if (method === "fire-engine;chrome-cdp") { engine = "chrome-cdp"; } @@ -180,7 +199,10 @@ export async function scrapSingleUrl( headers: pageOptions.headers, fireEngineOptions: { engine: engine, - } + atsv: pageOptions.atsv, + }, + priority, + teamId, }); scraperResponse.text = response.html; scraperResponse.screenshot = response.screenshot; @@ -339,11 +361,11 @@ export async function scrapSingleUrl( pageError = undefined; } - if (text && text.trim().length >= 100) { - Logger.debug(`⛏️ ${scraper}: Successfully scraped ${urlToScrap} with text length >= 100, breaking`); + if ((text && text.trim().length >= 100) || (typeof screenshot === "string" && screenshot.length > 0)) { + Logger.debug(`⛏️ ${scraper}: Successfully scraped ${urlToScrap} with text length >= 100 or screenshot, breaking`); break; } - if (pageStatusCode && pageStatusCode == 404) { + if (pageStatusCode && (pageStatusCode == 404 || pageStatusCode == 500)) { Logger.debug(`⛏️ ${scraper}: Successfully scraped ${urlToScrap} with status code 404, breaking`); break; } @@ -362,20 +384,22 @@ export async function scrapSingleUrl( let linksOnPage: string[] | undefined; - linksOnPage = extractLinks(rawHtml, urlToScrap); + if (pageOptions.includeLinks) { + linksOnPage = extractLinks(rawHtml, urlToScrap); + } let document: Document; if (screenshot && screenshot.length > 0) { document = { content: text, - markdown: text, + markdown: pageOptions.includeMarkdown || pageOptions.includeExtract ? text : undefined, html: pageOptions.includeHtml ? 
html : undefined, rawHtml: pageOptions.includeRawHtml || - extractorOptions.mode === "llm-extraction-from-raw-html" + (extractorOptions?.mode === "llm-extraction-from-raw-html" && pageOptions.includeExtract) ? rawHtml : undefined, - linksOnPage, + linksOnPage: pageOptions.includeLinks ? linksOnPage : undefined, metadata: { ...metadata, screenshot: screenshot, @@ -387,11 +411,11 @@ export async function scrapSingleUrl( } else { document = { content: text, - markdown: text, + markdown: pageOptions.includeMarkdown || pageOptions.includeExtract ? text : undefined, html: pageOptions.includeHtml ? html : undefined, rawHtml: pageOptions.includeRawHtml || - extractorOptions.mode === "llm-extraction-from-raw-html" + (extractorOptions?.mode === "llm-extraction-from-raw-html" && pageOptions.includeExtract) ? rawHtml : undefined, metadata: { @@ -400,7 +424,7 @@ export async function scrapSingleUrl( pageStatusCode: pageStatusCode, pageError: pageError, }, - linksOnPage, + linksOnPage: pageOptions.includeLinks ? linksOnPage : undefined, }; } @@ -414,9 +438,9 @@ export async function scrapSingleUrl( }); return { content: "", - markdown: "", + markdown: pageOptions.includeMarkdown || pageOptions.includeExtract ? "" : undefined, html: "", - linksOnPage: [], + linksOnPage: pageOptions.includeLinks ? [] : undefined, metadata: { sourceURL: urlToScrap, pageStatusCode: pageStatusCode, diff --git a/apps/api/src/scraper/WebScraper/utils/__tests__/blocklist.test.ts b/apps/api/src/scraper/WebScraper/utils/__tests__/blocklist.test.ts index 42525257..77411b00 100644 --- a/apps/api/src/scraper/WebScraper/utils/__tests__/blocklist.test.ts +++ b/apps/api/src/scraper/WebScraper/utils/__tests__/blocklist.test.ts @@ -8,7 +8,6 @@ describe('Blocklist Functionality', () => { 'https://twitter.com/home', 'https://instagram.com/explore', 'https://linkedin.com/in/johndoe', - 'https://pinterest.com/pin/create', 'https://snapchat.com/add/johndoe', 'https://tiktok.com/@johndoe', 'https://reddit.com/r/funny', diff --git a/apps/api/src/scraper/WebScraper/utils/__tests__/socialBlockList.test.ts b/apps/api/src/scraper/WebScraper/utils/__tests__/socialBlockList.test.ts index c09cc5b3..3d98fedf 100644 --- a/apps/api/src/scraper/WebScraper/utils/__tests__/socialBlockList.test.ts +++ b/apps/api/src/scraper/WebScraper/utils/__tests__/socialBlockList.test.ts @@ -8,7 +8,6 @@ describe('isUrlBlocked', () => { 'https://twitter.com/someuser', 'https://instagram.com/someuser', 'https://www.linkedin.com/in/someuser', - 'https://pinterest.com/someuser', 'https://snapchat.com/someuser', 'https://tiktok.com/@someuser', 'https://reddit.com/r/somesubreddit', diff --git a/apps/api/src/scraper/WebScraper/utils/blocklist.ts b/apps/api/src/scraper/WebScraper/utils/blocklist.ts index 0bdf9876..e076d890 100644 --- a/apps/api/src/scraper/WebScraper/utils/blocklist.ts +++ b/apps/api/src/scraper/WebScraper/utils/blocklist.ts @@ -6,7 +6,6 @@ const socialMediaBlocklist = [ 'twitter.com', 'instagram.com', 'linkedin.com', - 'pinterest.com', 'snapchat.com', 'tiktok.com', 'reddit.com', @@ -15,6 +14,11 @@ const socialMediaBlocklist = [ 'whatsapp.com', 'wechat.com', 'telegram.org', + 'researchhub.com', + 'youtube.com', + 'corterix.com', + 'southwest.com', + 'ryanair.com' ]; const allowedKeywords = [ diff --git a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts index fcd3f69b..af8d1f34 100644 --- a/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts +++ 
b/apps/api/src/scraper/WebScraper/utils/custom/website_params.ts @@ -1,24 +1,11 @@ export const urlSpecificParams = { "platform.openai.com": { - params: { - wait_browser: "networkidle2", - block_resources: false, - }, - headers: { - "User-Agent": - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36", - "sec-fetch-site": "same-origin", - "sec-fetch-mode": "cors", - "sec-fetch-dest": "empty", - referer: "https://www.google.com/", - "accept-language": "en-US,en;q=0.9", - "accept-encoding": "gzip, deflate, br", - accept: - "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", - }, - cookies: { - __cf_bm: - "mC1On8P2GWT3A5UeSYH6z_MP94xcTAdZ5jfNi9IT2U0-1714327136-1.0.1.1-ILAP5pSX_Oo9PPo2iHEYCYX.p9a0yRBNLr58GHyrzYNDJ537xYpG50MXxUYVdfrD.h3FV5O7oMlRKGA0scbxaQ", + defaultScraper: "fire-engine", + params:{ + wait: 3000, + fireEngineOptions:{ + engine: "chrome-cdp" + }, }, }, "support.greenpay.me":{ @@ -247,5 +234,13 @@ export const urlSpecificParams = { engine: "tlsclient", }, }, + }, + "zoopla.co.uk":{ + defaultScraper: "fire-engine", + params:{ + fireEngineOptions:{ + engine: "chrome-cdp", + }, + }, } }; diff --git a/apps/api/src/scraper/WebScraper/utils/metadata.ts b/apps/api/src/scraper/WebScraper/utils/metadata.ts index 9496d569..fac53b38 100644 --- a/apps/api/src/scraper/WebScraper/utils/metadata.ts +++ b/apps/api/src/scraper/WebScraper/utils/metadata.ts @@ -75,9 +75,7 @@ export function extractMetadata(soup: CheerioAPI, url: string): Metadata { description = soup('meta[name="description"]').attr("content") || null; // Assuming the language is part of the URL as per the regex pattern - const pattern = /([a-zA-Z]+-[A-Z]{2})/; - const match = pattern.exec(url); - language = match ? 
match[1] : null; + language = soup('html').attr('lang') || null; keywords = soup('meta[name="keywords"]').attr("content") || null; robots = soup('meta[name="robots"]').attr("content") || null; diff --git a/apps/api/src/scraper/WebScraper/utils/utils.ts b/apps/api/src/scraper/WebScraper/utils/utils.ts index dd5906b0..872adc6e 100644 --- a/apps/api/src/scraper/WebScraper/utils/utils.ts +++ b/apps/api/src/scraper/WebScraper/utils/utils.ts @@ -41,10 +41,10 @@ export function extractLinks(html: string, baseUrl: string): string[] { links.push(href); } else if (href.startsWith('/')) { // Relative URL starting with '/', append to origin - links.push(`${origin}${href}`); + links.push(new URL(href, baseUrl).href); } else if (!href.startsWith('#') && !href.startsWith('mailto:')) { // Relative URL not starting with '/', append to base URL - links.push(`${baseUrl}/${href}`); + links.push(new URL(href, baseUrl).href); } else if (href.startsWith('mailto:')) { // mailto: links, add as is links.push(href); diff --git a/apps/api/src/search/fireEngine.ts b/apps/api/src/search/fireEngine.ts new file mode 100644 index 00000000..7c6d8a4d --- /dev/null +++ b/apps/api/src/search/fireEngine.ts @@ -0,0 +1,45 @@ +import axios from "axios"; +import dotenv from "dotenv"; +import { SearchResult } from "../../src/lib/entities"; + +dotenv.config(); + +export async function fireEngineMap(q: string, options: { + tbs?: string; + filter?: string; + lang?: string; + country?: string; + location?: string; + numResults: number; + page?: number; +}): Promise<SearchResult[]> { + let data = JSON.stringify({ + query: q, + lang: options.lang, + country: options.country, + location: options.location, + tbs: options.tbs, + numResults: options.numResults, + page: options.page ?? 1, + }); + + if (!process.env.FIRE_ENGINE_BETA_URL) { + console.warn("(v1/map Beta) Results might differ from cloud offering currently."); + return []; + } + + let config = { + method: "POST", + url: `${process.env.FIRE_ENGINE_BETA_URL}/search`, + headers: { + "Content-Type": "application/json", + }, + data: data, + }; + const response = await axios(config); + if (response && response.data) { + return response.data; + } else { + return []; + } +} diff --git a/apps/api/src/search/googlesearch.ts b/apps/api/src/search/googlesearch.ts index 060f4bd8..0e247702 100644 --- a/apps/api/src/search/googlesearch.ts +++ b/apps/api/src/search/googlesearch.ts @@ -52,7 +52,7 @@ async function _req(term: string, results: number, lang: string, country: string -export async function google_search(term: string, advanced = false, num_results = 7, tbs = null, filter = null, lang = "en", country = "us", proxy = null, sleep_interval = 0, timeout = 5000, ) :Promise { +export async function googleSearch(term: string, advanced = false, num_results = 7, tbs = null, filter = null, lang = "en", country = "us", proxy = null, sleep_interval = 0, timeout = 5000, ) :Promise { let proxies = null; if (proxy) { if (proxy.startsWith("https")) { diff --git a/apps/api/src/search/index.ts b/apps/api/src/search/index.ts index f5bc06e3..f4c5b6d0 100644 --- a/apps/api/src/search/index.ts +++ b/apps/api/src/search/index.ts @@ -1,11 +1,9 @@ import { Logger } from "../../src/lib/logger"; import { SearchResult } from "../../src/lib/entities"; -import { google_search } from "./googlesearch"; +import { googleSearch } from "./googlesearch"; +import { fireEngineMap } from "./fireEngine"; import { serper_search } from "./serper"; - - - export async function 
search({ proxy?: string; sleep_interval?: number; timeout?: number; -}) : Promise { +}): Promise { try { - if (process.env.SERPER_API_KEY ) { - return await serper_search(query, {num_results, tbs, filter, lang, country, location}); + + if (process.env.SERPER_API_KEY) { + return await serper_search(query, { + num_results, + tbs, + filter, + lang, + country, + location, + }); } - return await google_search( + return await googleSearch( query, advanced, num_results, @@ -49,7 +55,6 @@ export async function search({ ); } catch (error) { Logger.error(`Error in search function: ${error}`); - return [] + return []; } - // if process.env.SERPER_API_KEY is set, use serper } diff --git a/apps/api/src/services/alerts/index.ts b/apps/api/src/services/alerts/index.ts index 88b3c726..cb953e2e 100644 --- a/apps/api/src/services/alerts/index.ts +++ b/apps/api/src/services/alerts/index.ts @@ -1,5 +1,5 @@ import { Logger } from "../../../src/lib/logger"; -import { getWebScraperQueue } from "../queue-service"; +import { getScrapeQueue } from "../queue-service"; import { sendSlackWebhook } from "./slack"; export async function checkAlerts() { @@ -13,8 +13,8 @@ export async function checkAlerts() { Logger.info("Initializing alerts"); const checkActiveJobs = async () => { try { - const webScraperQueue = getWebScraperQueue(); - const activeJobs = await webScraperQueue.getActiveCount(); + const scrapeQueue = getScrapeQueue(); + const activeJobs = await scrapeQueue.getActiveCount(); if (activeJobs > Number(process.env.ALERT_NUM_ACTIVE_JOBS)) { Logger.warn( `Alert: Number of active jobs is over ${process.env.ALERT_NUM_ACTIVE_JOBS}. Current active jobs: ${activeJobs}.` @@ -34,11 +34,10 @@ export async function checkAlerts() { }; const checkWaitingQueue = async () => { - const webScraperQueue = getWebScraperQueue(); - const waitingJobs = await webScraperQueue.getWaitingCount(); - const paused = await webScraperQueue.getPausedCount(); + const scrapeQueue = getScrapeQueue(); + const waitingJobs = await scrapeQueue.getWaitingCount(); - if (waitingJobs !== paused && waitingJobs > Number(process.env.ALERT_NUM_WAITING_JOBS)) { + if (waitingJobs > Number(process.env.ALERT_NUM_WAITING_JOBS)) { Logger.warn( `Alert: Number of waiting jobs is over ${process.env.ALERT_NUM_WAITING_JOBS}. Current waiting jobs: ${waitingJobs}.` ); diff --git a/apps/api/src/services/alerts/slack.ts b/apps/api/src/services/alerts/slack.ts index 96bf1c09..0fa75693 100644 --- a/apps/api/src/services/alerts/slack.ts +++ b/apps/api/src/services/alerts/slack.ts @@ -3,9 +3,9 @@ import { Logger } from "../../../src/lib/logger"; export async function sendSlackWebhook( message: string, - alertEveryone: boolean = false + alertEveryone: boolean = false, + webhookUrl: string = process.env.SLACK_WEBHOOK_URL ?? "" ) { - const webhookUrl = process.env.SLACK_WEBHOOK_URL; const messagePrefix = alertEveryone ? 
" " : ""; const payload = { text: `${messagePrefix} ${message}`, diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts index 765d028e..22dc72df 100644 --- a/apps/api/src/services/billing/credit_billing.ts +++ b/apps/api/src/services/billing/credit_billing.ts @@ -4,37 +4,12 @@ import { sendNotification } from "../notification/email_notification"; import { supabase_service } from "../supabase"; import { Logger } from "../../lib/logger"; import { getValue, setValue } from "../redis"; -import Redlock from "redlock"; -import Client from "ioredis"; +import { redlock } from "../redlock"; + const FREE_CREDITS = 500; -const redlock = new Redlock( - // You should have one client for each independent redis node - // or cluster. - [new Client(process.env.REDIS_RATE_LIMIT_URL)], - { - // The expected clock drift; for more details see: - // http://redis.io/topics/distlock - driftFactor: 0.01, // multiplied by lock ttl to determine drift time - // The max number of times Redlock will attempt to lock a resource - // before erroring. - retryCount: 5, - - // the time in ms between attempts - retryDelay: 100, // time in ms - - // the max time in ms randomly added to retries - // to improve performance under high contention - // see https://www.awsarchitectureblog.com/2015/03/backoff.html - retryJitter: 200, // time in ms - - // The minimum remaining time on a lock before an extension is automatically - // attempted with the `using` API. - automaticExtensionThreshold: 500, // time in ms - } -); export async function billTeam(team_id: string, credits: number) { return withAuth(supaBillTeam)(team_id, credits); } @@ -193,10 +168,11 @@ export async function supaBillTeam(team_id: string, credits: number) { export async function checkTeamCredits(team_id: string, credits: number) { return withAuth(supaCheckTeamCredits)(team_id, credits); } + // if team has enough credits for the operation, return true, else return false export async function supaCheckTeamCredits(team_id: string, credits: number) { if (team_id === "preview") { - return { success: true, message: "Preview team, no credits used" }; + return { success: true, message: "Preview team, no credits used", remainingCredits: Infinity }; } // Retrieve the team's active subscription and check for available coupons concurrently @@ -223,21 +199,44 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { ); } + + // Free credits, no coupons - if (subscriptionError || !subscription) { + if (!subscription || subscriptionError) { + // If there is no active subscription but there are available coupons if (couponCredits >= credits) { - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: couponCredits }; } - const { data: creditUsages, error: creditUsageError } = - await supabase_service + let creditUsages; + let creditUsageError; + let retries = 0; + const maxRetries = 3; + const retryInterval = 2000; // 2 seconds + + while (retries < maxRetries) { + const result = await supabase_service .from("credit_usage") .select("credits_used") .is("subscription_id", null) .eq("team_id", team_id); + creditUsages = result.data; + creditUsageError = result.error; + + if (!creditUsageError) { + break; + } + + retries++; + if (retries < maxRetries) { + await new Promise(resolve => setTimeout(resolve, retryInterval)); + } + } + if (creditUsageError) { + Logger.error(`Credit usage error after ${maxRetries} 
attempts: ${creditUsageError}`); throw new Error( `Failed to retrieve credit usage for team_id: ${team_id}` ); @@ -277,9 +276,10 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { return { success: false, message: "Insufficient credits, please upgrade!", + remainingCredits: FREE_CREDITS - totalCreditsUsed }; } - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: FREE_CREDITS - totalCreditsUsed }; } let totalCreditsUsed = 0; @@ -340,24 +340,24 @@ export async function supaCheckTeamCredits(team_id: string, credits: number) { // Compare the adjusted total credits used with the credits allowed by the plan if (adjustedCreditsUsed + credits > price.credits) { - await sendNotification( - team_id, - NotificationType.LIMIT_REACHED, - subscription.current_period_start, - subscription.current_period_end - ); - return { success: false, message: "Insufficient credits, please upgrade!" }; + // await sendNotification( + // team_id, + // NotificationType.LIMIT_REACHED, + // subscription.current_period_start, + // subscription.current_period_end + // ); + return { success: false, message: "Insufficient credits, please upgrade!", remainingCredits: creditLimit - adjustedCreditsUsed }; } else if (creditUsagePercentage >= 0.8) { // Send email notification for approaching credit limit - await sendNotification( - team_id, - NotificationType.APPROACHING_LIMIT, - subscription.current_period_start, - subscription.current_period_end - ); + // await sendNotification( + // team_id, + // NotificationType.APPROACHING_LIMIT, + // subscription.current_period_start, + // subscription.current_period_end + // ); } - return { success: true, message: "Sufficient credits available" }; + return { success: true, message: "Sufficient credits available", remainingCredits: creditLimit - adjustedCreditsUsed }; } // Count the total credits used by a team within the current billing period and return the remaining credits. 
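Note on the retry logic added to supaCheckTeamCredits above: the inlined three-attempt loop around the credit_usage query could be factored into a small reusable helper. The sketch below is illustrative only and not part of this patch; the withRetries name and QueryResult shape are hypothetical, and it assumes the same 3-attempt / 2-second policy used in the loop above.

// Hypothetical helper (not part of this patch) showing the retry pattern used
// for the credit_usage query: re-run a query that returns { data, error } up to
// maxRetries times, waiting retryIntervalMs between attempts, and hand the last
// result back to the caller to decide whether to throw.
type QueryResult<T> = { data: T | null; error: { message: string } | null };

async function withRetries<T>(
  run: () => Promise<QueryResult<T>>,
  maxRetries: number = 3,
  retryIntervalMs: number = 2000
): Promise<QueryResult<T>> {
  let last: QueryResult<T> = { data: null, error: { message: "query never ran" } };
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    last = await run();
    if (!last.error) {
      return last; // success, stop retrying
    }
    if (attempt < maxRetries - 1) {
      await new Promise((resolve) => setTimeout(resolve, retryIntervalMs));
    }
  }
  return last; // still failing after maxRetries attempts
}

Under that assumption the call site would reduce to a single call such as await withRetries(() => supabase_service.from("credit_usage").select("credits_used").is("subscription_id", null).eq("team_id", team_id)), with the existing error handling left unchanged.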
diff --git a/apps/api/src/services/logging/log_job.ts b/apps/api/src/services/logging/log_job.ts index 2525917c..d4494f09 100644 --- a/apps/api/src/services/logging/log_job.ts +++ b/apps/api/src/services/logging/log_job.ts @@ -41,10 +41,11 @@ export async function logJob(job: FirecrawlJob) { extractor_options: job.extractor_options, num_tokens: job.num_tokens, retry: !!job.retry, + crawl_id: job.crawl_id, }, ]); - if (process.env.POSTHOG_API_KEY) { + if (process.env.POSTHOG_API_KEY && !job.crawl_id) { let phLog = { distinctId: "from-api", //* To identify this on the group level, setting distinctid to a static string per posthog docs: https://posthog.com/docs/product-analytics/group-analytics#advanced-server-side-only-capturing-group-events-without-a-user ...(job.team_id !== "preview" && { diff --git a/apps/api/src/services/queue-jobs.ts b/apps/api/src/services/queue-jobs.ts index 9403fc1f..941b571d 100644 --- a/apps/api/src/services/queue-jobs.ts +++ b/apps/api/src/services/queue-jobs.ts @@ -1,17 +1,72 @@ -import { Job, Queue } from "bull"; -import { - getWebScraperQueue, -} from "./queue-service"; +import { Job, Queue } from "bullmq"; +import { getScrapeQueue } from "./queue-service"; import { v4 as uuidv4 } from "uuid"; import { WebScraperOptions } from "../types"; +import * as Sentry from "@sentry/node"; -export async function addWebScraperJob( - webScraperOptions: WebScraperOptions, - options: any = {} +async function addScrapeJobRaw( + webScraperOptions: any, + options: any, + jobId: string, + jobPriority: number = 10 ): Promise { - return await getWebScraperQueue().add(webScraperOptions, { + return await getScrapeQueue().add(jobId, webScraperOptions, { ...options, - jobId: uuidv4(), + priority: jobPriority, + jobId, }); } +export async function addScrapeJob( + webScraperOptions: WebScraperOptions, + options: any = {}, + jobId: string = uuidv4(), + jobPriority: number = 10 +): Promise { + + if (Sentry.isInitialized()) { + const size = JSON.stringify(webScraperOptions).length; + return await Sentry.startSpan({ + name: "Add scrape job", + op: "queue.publish", + attributes: { + "messaging.message.id": jobId, + "messaging.destination.name": getScrapeQueue().name, + "messaging.message.body.size": size, + }, + }, async (span) => { + return await addScrapeJobRaw({ + ...webScraperOptions, + sentry: { + trace: Sentry.spanToTraceHeader(span), + baggage: Sentry.spanToBaggageHeader(span), + size, + }, + }, options, jobId, jobPriority); + }); + } else { + return await addScrapeJobRaw(webScraperOptions, options, jobId, jobPriority); + } +} + +export function waitForJob(jobId: string, timeout: number) { + return new Promise((resolve, reject) => { + const start = Date.now(); + const int = setInterval(async () => { + if (Date.now() >= start + timeout) { + clearInterval(int); + reject(new Error("Job wait ")); + } else { + const state = await getScrapeQueue().getJobState(jobId); + if (state === "completed") { + clearInterval(int); + resolve((await getScrapeQueue().getJob(jobId)).returnvalue); + } else if (state === "failed") { + // console.log("failed", (await getScrapeQueue().getJob(jobId)).failedReason); + clearInterval(int); + reject((await getScrapeQueue().getJob(jobId)).failedReason); + } + } + }, 1000); + }) +} diff --git a/apps/api/src/services/queue-service.ts b/apps/api/src/services/queue-service.ts index 0cd65f32..113b3fa3 100644 --- a/apps/api/src/services/queue-service.ts +++ b/apps/api/src/services/queue-service.ts @@ -1,23 +1,40 @@ -import Queue from "bull"; -import { Queue as BullQueue } 
from "bull"; +import { Queue } from "bullmq"; import { Logger } from "../lib/logger"; +import IORedis from "ioredis"; -let webScraperQueue: BullQueue; +let scrapeQueue: Queue; -export function getWebScraperQueue() { - if (!webScraperQueue) { - webScraperQueue = new Queue("web-scraper", process.env.REDIS_URL, { - settings: { - lockDuration: 1 * 60 * 1000, // 1 minute in milliseconds, - lockRenewTime: 15 * 1000, // 15 seconds in milliseconds - stalledInterval: 30 * 1000, - maxStalledCount: 10, - }, - defaultJobOptions:{ - attempts: 2 +export const redisConnection = new IORedis(process.env.REDIS_URL, { + maxRetriesPerRequest: null, +}); + +export const scrapeQueueName = "{scrapeQueue}"; + +export function getScrapeQueue() { + if (!scrapeQueue) { + scrapeQueue = new Queue( + scrapeQueueName, + { + connection: redisConnection, } - }); + // { + // settings: { + // lockDuration: 1 * 60 * 1000, // 1 minute in milliseconds, + // lockRenewTime: 15 * 1000, // 15 seconds in milliseconds + // stalledInterval: 30 * 1000, + // maxStalledCount: 10, + // }, + // defaultJobOptions:{ + // attempts: 5 + // } + // } + ); Logger.info("Web scraper queue created"); } - return webScraperQueue; + return scrapeQueue; } + + +// === REMOVED IN FAVOR OF POLLING -- NOT RELIABLE +// import { QueueEvents } from 'bullmq'; +// export const scrapeQueueEvents = new QueueEvents(scrapeQueueName, { connection: redisConnection.duplicate() }); \ No newline at end of file diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts index cc92b3ab..6488759f 100644 --- a/apps/api/src/services/queue-worker.ts +++ b/apps/api/src/services/queue-worker.ts @@ -1,74 +1,456 @@ -import { CustomError } from "../lib/custom-error"; -import { getWebScraperQueue } from "./queue-service"; import "dotenv/config"; +import "./sentry"; +import * as Sentry from "@sentry/node"; +import { CustomError } from "../lib/custom-error"; +import { + getScrapeQueue, + redisConnection, + scrapeQueueName, +} from "./queue-service"; import { logtail } from "./logtail"; import { startWebScraperPipeline } from "../main/runWebScraper"; import { callWebhook } from "./webhook"; import { logJob } from "./logging/log_job"; -import { initSDK } from '@hyperdx/node-opentelemetry'; -import { Job } from "bull"; +import { initSDK } from "@hyperdx/node-opentelemetry"; +import { Job } from "bullmq"; import { Logger } from "../lib/logger"; -import { ScrapeEvents } from "../lib/scrape-events"; +import { Worker } from "bullmq"; +import systemMonitor from "./system-monitor"; +import { v4 as uuidv4 } from "uuid"; +import { + addCrawlJob, + addCrawlJobDone, + crawlToCrawler, + finishCrawl, + getCrawl, + getCrawlJobs, + lockURL, +} from "../lib/crawl-redis"; +import { StoredCrawl } from "../lib/crawl-redis"; +import { addScrapeJob } from "./queue-jobs"; +import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; +import { + addJobPriority, + deleteJobPriority, + getJobPriority, +} from "../../src/lib/job-priority"; +import { PlanType } from "../types"; +import { getJobs } from "../../src/controllers/v1/crawl-status"; -if (process.env.ENV === 'production') { +if (process.env.ENV === "production") { initSDK({ consoleCapture: true, additionalInstrumentations: [], }); } +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); -const wsq = getWebScraperQueue(); +const workerLockDuration = Number(process.env.WORKER_LOCK_DURATION) || 60000; +const workerStalledCheckInterval = + Number(process.env.WORKER_STALLED_CHECK_INTERVAL) || 
30000; +const jobLockExtendInterval = + Number(process.env.JOB_LOCK_EXTEND_INTERVAL) || 15000; +const jobLockExtensionTime = + Number(process.env.JOB_LOCK_EXTENSION_TIME) || 60000; -async function processJob(job: Job, done) { +const cantAcceptConnectionInterval = + Number(process.env.CANT_ACCEPT_CONNECTION_INTERVAL) || 2000; +const connectionMonitorInterval = + Number(process.env.CONNECTION_MONITOR_INTERVAL) || 10; +const gotJobInterval = Number(process.env.CONNECTION_MONITOR_INTERVAL) || 20; + +const processJobInternal = async (token: string, job: Job) => { + const extendLockInterval = setInterval(async () => { + Logger.info(`🐂 Worker extending lock on job ${job.id}`); + await job.extendLock(token, jobLockExtensionTime); + }, jobLockExtendInterval); + + await addJobPriority(job.data.team_id, job.id); + let err = null; + try { + const result = await processJob(job, token); + try { + if (job.data.crawl_id && process.env.USE_DB_AUTHENTICATION === "true") { + await job.moveToCompleted(null, token, false); + } else { + await job.moveToCompleted(result.docs, token, false); + } + } catch (e) {} + } catch (error) { + console.log("Job failed, error:", error); + Sentry.captureException(error); + err = error; + await job.moveToFailed(error, token, false); + } finally { + await deleteJobPriority(job.data.team_id, job.id); + clearInterval(extendLockInterval); + } + + return err; +}; + +let isShuttingDown = false; + +process.on("SIGINT", () => { + console.log("Received SIGINT. Shutting down gracefully..."); + isShuttingDown = true; +}); + +const workerFun = async ( + queueName: string, + processJobInternal: (token: string, job: Job) => Promise +) => { + const worker = new Worker(queueName, null, { + connection: redisConnection, + lockDuration: 1 * 60 * 1000, // 1 minute + // lockRenewTime: 15 * 1000, // 15 seconds + stalledInterval: 30 * 1000, // 30 seconds + maxStalledCount: 10, // 10 times + }); + + worker.startStalledCheckTimer(); + + const monitor = await systemMonitor; + + while (true) { + if (isShuttingDown) { + console.log("No longer accepting new jobs. SIGINT"); + break; + } + const token = uuidv4(); + const canAcceptConnection = await monitor.acceptConnection(); + if (!canAcceptConnection) { + console.log("Cant accept connection"); + await sleep(cantAcceptConnectionInterval); // more sleep + continue; + } + + const job = await worker.getNextJob(token); + if (job) { + if (job.data && job.data.sentry && Sentry.isInitialized()) { + Sentry.continueTrace( + { + sentryTrace: job.data.sentry.trace, + baggage: job.data.sentry.baggage, + }, + () => { + Sentry.startSpan( + { + name: "Scrape job", + attributes: { + job: job.id, + worker: process.env.FLY_MACHINE_ID ?? worker.id, + }, + }, + async (span) => { + await Sentry.startSpan( + { + name: "Process scrape job", + op: "queue.process", + attributes: { + "messaging.message.id": job.id, + "messaging.destination.name": getScrapeQueue().name, + "messaging.message.body.size": job.data.sentry.size, + "messaging.message.receive.latency": + Date.now() - (job.processedOn ?? job.timestamp), + "messaging.message.retry.count": job.attemptsMade, + }, + }, + async () => { + const res = await processJobInternal(token, job); + if (res !== null) { + span.setStatus({ code: 2 }); // ERROR + } else { + span.setStatus({ code: 1 }); // OK + } + } + ); + } + ); + } + ); + } else { + Sentry.startSpan( + { + name: "Scrape job", + attributes: { + job: job.id, + worker: process.env.FLY_MACHINE_ID ?? 
worker.id, + }, + }, + () => { + processJobInternal(token, job); + } + ); + } + + await sleep(gotJobInterval); + } else { + await sleep(connectionMonitorInterval); + } + } +}; + +workerFun(scrapeQueueName, processJobInternal); + +async function processJob(job: Job, token: string) { Logger.info(`🐂 Worker taking job ${job.id}`); + // Check if the job URL is researchhub and block it immediately + // TODO: remove this once solve the root issue + if ( + job.data.url && + (job.data.url.includes("researchhub.com") || + job.data.url.includes("ebay.com") || + job.data.url.includes("youtube.com") || + job.data.url.includes("microsoft.com")) + ) { + Logger.info(`🐂 Blocking job ${job.id} with URL ${job.data.url}`); + const data = { + success: false, + docs: [], + project_id: job.data.project_id, + error: + "URL is blocked. Suspecious activity detected. Please contact hello@firecrawl.com if you believe this is an error.", + }; + await job.moveToCompleted(data.docs, token, false); + return data; + } + try { - job.progress({ + job.updateProgress({ current: 1, total: 100, current_step: "SCRAPING", current_url: "", }); const start = Date.now(); - const { success, message, docs } = await startWebScraperPipeline({ job }); + + const { success, message, docs } = await startWebScraperPipeline({ + job, + token, + }); + + // Better if we throw here so we capture with the correct error + if (!success) { + throw new Error(message); + } const end = Date.now(); const timeTakenInSeconds = (end - start) / 1000; + const rawHtml = docs[0] ? docs[0].rawHtml : ""; + const data = { - success: success, + success, result: { links: docs.map((doc) => { - return { content: doc, source: doc?.metadata?.sourceURL ?? doc?.url ?? "" }; + return { + content: doc, + source: doc?.metadata?.sourceURL ?? doc?.url ?? "", + }; }), }, project_id: job.data.project_id, error: message /* etc... */, + docs, }; - await callWebhook(job.data.team_id, job.id as string, data); + // No idea what this does and when it is called. + if (job.data.mode === "crawl" && !job.data.v1) { + callWebhook( + job.data.team_id, + job.id as string, + data, + job.data.webhook, + job.data.v1 + ); + } + if (job.data.webhook && job.data.mode !== "crawl" && job.data.v1) { + await callWebhook( + job.data.team_id, + job.data.crawl_id, + data, + job.data.webhook, + job.data.v1, + "crawl.page", + true + ); + } + + if (job.data.crawl_id) { + await logJob({ + job_id: job.id as string, + success: success, + message: message, + num_docs: docs.length, + docs: docs, + time_taken: timeTakenInSeconds, + team_id: job.data.team_id, + mode: job.data.mode, + url: job.data.url, + crawlerOptions: job.data.crawlerOptions, + pageOptions: job.data.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + }); + + await addCrawlJobDone(job.data.crawl_id, job.id); + + const sc = (await getCrawl(job.data.crawl_id)) as StoredCrawl; + + if (!job.data.sitemapped) { + if (!sc.cancelled) { + const crawler = crawlToCrawler(job.data.crawl_id, sc); + + const links = crawler.filterLinks( + crawler.extractLinksFromHTML(rawHtml ?? "", sc.originUrl), + Infinity, + sc.crawlerOptions?.maxDepth ?? 10 + ); + + for (const link of links) { + if (await lockURL(job.data.crawl_id, sc, link)) { + // This seems to work really welel + const jobPriority = await getJobPriority({ + plan: sc.plan as PlanType, + team_id: sc.team_id, + basePriority: job.data.crawl_id ? 
20 : 10, + }); + const jobId = uuidv4(); + + // console.log("plan: ", sc.plan); + // console.log("team_id: ", sc.team_id) + // console.log("base priority: ", job.data.crawl_id ? 20 : 10) + // console.log("job priority: " , jobPriority, "\n\n\n") + + const newJob = await addScrapeJob( + { + url: link, + mode: "single_urls", + crawlerOptions: sc.crawlerOptions, + team_id: sc.team_id, + pageOptions: sc.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + v1: job.data.v1, + }, + {}, + jobId, + jobPriority + ); + + await addCrawlJob(job.data.crawl_id, newJob.id); + } + } + } + } + + if (await finishCrawl(job.data.crawl_id)) { + + + if (!job.data.v1) { + const jobIDs = await getCrawlJobs(job.data.crawl_id); + + const jobs = (await getJobs(jobIDs)).sort((a, b) => a.timestamp - b.timestamp); + const jobStatuses = await Promise.all(jobs.map((x) => x.getState())); + const jobStatus = + sc.cancelled || jobStatuses.some((x) => x === "failed") + ? "failed" + : "completed"; + + const fullDocs = jobs.map((x) => + Array.isArray(x.returnvalue) ? x.returnvalue[0] : x.returnvalue + ); + + await logJob({ + job_id: job.data.crawl_id, + success: jobStatus === "completed", + message: sc.cancelled ? "Cancelled" : message, + num_docs: fullDocs.length, + docs: [], + time_taken: (Date.now() - sc.createdAt) / 1000, + team_id: job.data.team_id, + mode: "crawl", + url: sc.originUrl, + crawlerOptions: sc.crawlerOptions, + pageOptions: sc.pageOptions, + origin: job.data.origin, + }); + + const data = { + success: jobStatus !== "failed", + result: { + links: fullDocs.map((doc) => { + return { + content: doc, + source: doc?.metadata?.sourceURL ?? doc?.url ?? "", + }; + }), + }, + project_id: job.data.project_id, + error: message /* etc... */, + docs: fullDocs, + }; + + // v0 web hooks, call when done with all the data + if (!job.data.v1) { + callWebhook( + job.data.team_id, + job.data.crawl_id, + data, + job.data.webhook, + job.data.v1, + "crawl.completed" + ); + } + } else { + const jobIDs = await getCrawlJobs(job.data.crawl_id); + const jobStatuses = await Promise.all(jobIDs.map((x) => getScrapeQueue().getJobState(x))); + const jobStatus = + sc.cancelled || jobStatuses.some((x) => x === "failed") + ? "failed" + : "completed"; + + // v1 web hooks, call when done with no data, but with event completed + if (job.data.v1 && job.data.webhook) { + callWebhook( + job.data.team_id, + job.data.crawl_id, + [], + job.data.webhook, + job.data.v1, + "crawl.completed" + ); + } + + await logJob({ + job_id: job.data.crawl_id, + success: jobStatus === "completed", + message: sc.cancelled ? 
"Cancelled" : message, + num_docs: jobIDs.length, + docs: [], + time_taken: (Date.now() - sc.createdAt) / 1000, + team_id: job.data.team_id, + mode: "crawl", + url: sc.originUrl, + crawlerOptions: sc.crawlerOptions, + pageOptions: sc.pageOptions, + origin: job.data.origin, + }); + } + } + } - await logJob({ - job_id: job.id as string, - success: success, - message: message, - num_docs: docs.length, - docs: docs, - time_taken: timeTakenInSeconds, - team_id: job.data.team_id, - mode: "crawl", - url: job.data.url, - crawlerOptions: job.data.crawlerOptions, - pageOptions: job.data.pageOptions, - origin: job.data.origin, - }); Logger.info(`🐂 Job done ${job.id}`); - done(null, data); + return data; } catch (error) { Logger.error(`🐂 Job errored ${job.id} - ${error}`); - if (await getWebScraperQueue().isPaused(false)) { - Logger.debug("🐂Queue is paused, ignoring"); - return; - } + + Sentry.captureException(error, { + data: { + job: job.id, + }, + }); if (error instanceof CustomError) { // Here we handle the error, then save the failed job @@ -81,6 +463,9 @@ async function processJob(job: Job, done) { }); } Logger.error(error); + if (error.stack) { + Logger.error(error.stack); + } logtail.error("Overall error ingesting", { job_id: job.id, @@ -89,37 +474,87 @@ async function processJob(job: Job, done) { const data = { success: false, + docs: [], project_id: job.data.project_id, error: "Something went wrong... Contact help@mendable.ai or try again." /* etc... */, }; - await callWebhook(job.data.team_id, job.id as string, data); - await logJob({ - job_id: job.id as string, - success: false, - message: typeof error === 'string' ? error : (error.message ?? "Something went wrong... Contact help@mendable.ai"), - num_docs: 0, - docs: [], - time_taken: 0, - team_id: job.data.team_id, - mode: "crawl", - url: job.data.url, - crawlerOptions: job.data.crawlerOptions, - pageOptions: job.data.pageOptions, - origin: job.data.origin, - }); - done(null, data); + + if (!job.data.v1 && (job.data.mode === "crawl" || job.data.crawl_id)) { + callWebhook( + job.data.team_id, + job.data.crawl_id ?? (job.id as string), + data, + job.data.webhook, + job.data.v1 + ); + } + if (job.data.v1) { + callWebhook( + job.data.team_id, + job.id as string, + [], + job.data.webhook, + job.data.v1, + "crawl.failed" + ); + } + + if (job.data.crawl_id) { + await logJob({ + job_id: job.id as string, + success: false, + message: + typeof error === "string" + ? error + : error.message ?? + "Something went wrong... Contact help@mendable.ai", + num_docs: 0, + docs: [], + time_taken: 0, + team_id: job.data.team_id, + mode: job.data.mode, + url: job.data.url, + crawlerOptions: job.data.crawlerOptions, + pageOptions: job.data.pageOptions, + origin: job.data.origin, + crawl_id: job.data.crawl_id, + }); + + const sc = await getCrawl(job.data.crawl_id); + + await logJob({ + job_id: job.data.crawl_id, + success: false, + message: + typeof error === "string" + ? error + : error.message ?? + "Something went wrong... Contact help@mendable.ai", + num_docs: 0, + docs: [], + time_taken: 0, + team_id: job.data.team_id, + mode: "crawl", + url: sc ? sc.originUrl : job.data.url, + crawlerOptions: sc ? sc.crawlerOptions : job.data.crawlerOptions, + pageOptions: sc ? sc.pageOptions : job.data.pageOptions, + origin: job.data.origin, + }); + } + // done(null, data); + return data; } } -wsq.process( - Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 8)), - processJob -); +// wsq.process( +// Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 
8)), +// processJob +// ); -wsq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); -wsq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); -wsq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); -wsq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); -wsq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); -wsq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); +// wsq.on("waiting", j => ScrapeEvents.logJobEvent(j, "waiting")); +// wsq.on("active", j => ScrapeEvents.logJobEvent(j, "active")); +// wsq.on("completed", j => ScrapeEvents.logJobEvent(j, "completed")); +// wsq.on("paused", j => ScrapeEvents.logJobEvent(j, "paused")); +// wsq.on("resumed", j => ScrapeEvents.logJobEvent(j, "resumed")); +// wsq.on("removed", j => ScrapeEvents.logJobEvent(j, "removed")); diff --git a/apps/api/src/services/rate-limiter.test.ts b/apps/api/src/services/rate-limiter.test.ts index c49c85d9..3e252301 100644 --- a/apps/api/src/services/rate-limiter.test.ts +++ b/apps/api/src/services/rate-limiter.test.ts @@ -65,7 +65,7 @@ describe("Rate Limiter Service", () => { "test-prefix:someToken", "standard" ); - expect(limiter2.points).toBe(50); + expect(limiter2.points).toBe(100); const limiter3 = getRateLimiter( "search" as RateLimiterMode, @@ -79,7 +79,7 @@ describe("Rate Limiter Service", () => { "test-prefix:someToken", "growth" ); - expect(limiter4.points).toBe(150); + expect(limiter4.points).toBe(250); }); it("should return the default rate limiter if plan is not provided", () => { @@ -153,7 +153,7 @@ describe("Rate Limiter Service", () => { "crawlStatus" as RateLimiterMode, "test-prefix:someToken" ); - expect(limiter2.points).toBe(150); + expect(limiter2.points).toBe(250); }); it("should consume points correctly for 'crawl' mode", async () => { @@ -188,14 +188,13 @@ describe("Rate Limiter Service", () => { "test-prefix:someTokenXY", "hobby" ); - // expect hobby to have 100 points - expect(limiter.points).toBe(10); + expect(limiter.points).toBe(20); const consumePoints = 5; const res = await limiter.consume("test-prefix:someTokenXY", consumePoints); expect(res.consumedPoints).toBe(5); - expect(res.remainingPoints).toBe(5); + expect(res.remainingPoints).toBe(15); }); it("should return the correct rate limiter for 'crawl' mode", () => { @@ -227,7 +226,7 @@ describe("Rate Limiter Service", () => { "test-prefix:someToken", "free" ); - expect(limiter.points).toBe(5); + expect(limiter.points).toBe(10); const limiter2 = getRateLimiter( "scrape" as RateLimiterMode, @@ -241,7 +240,14 @@ describe("Rate Limiter Service", () => { "test-prefix:someToken", "standard" ); - expect(limiter3.points).toBe(50); + expect(limiter3.points).toBe(100); + + const limiter4 = getRateLimiter( + "scrape" as RateLimiterMode, + "test-prefix:someToken", + "growth" + ); + expect(limiter4.points).toBe(1000); }); it("should return the correct rate limiter for 'search' mode", () => { @@ -309,7 +315,7 @@ describe("Rate Limiter Service", () => { "crawlStatus" as RateLimiterMode, "test-prefix:someToken" ); - expect(limiter2.points).toBe(150); + expect(limiter2.points).toBe(250); }); it("should return the correct rate limiter for 'testSuite' mode", () => { diff --git a/apps/api/src/services/rate-limiter.ts b/apps/api/src/services/rate-limiter.ts index d90ab4f7..dade8493 100644 --- a/apps/api/src/services/rate-limiter.ts +++ b/apps/api/src/services/rate-limiter.ts @@ -14,18 +14,20 @@ const RATE_LIMITS = { standardNew: 10, standardnew: 10, growth: 50, + growthdouble: 50, }, scrape: { default: 20, - 
free: 5, + free: 10, starter: 20, - standard: 50, + standard: 100, standardOld: 40, scale: 500, - hobby: 10, - standardNew: 50, - standardnew: 50, - growth: 500, + hobby: 20, + standardNew: 100, + standardnew: 100, + growth: 1000, + growthdouble: 1000, }, search: { default: 20, @@ -38,6 +40,20 @@ const RATE_LIMITS = { standardNew: 50, standardnew: 50, growth: 500, + growthdouble: 500, + }, + map:{ + default: 20, + free: 5, + starter: 20, + standard: 40, + standardOld: 40, + scale: 500, + hobby: 10, + standardNew: 50, + standardnew: 50, + growth: 500, + growthdouble: 500, }, preview: { free: 5, @@ -49,7 +65,7 @@ const RATE_LIMITS = { }, crawlStatus: { free: 150, - default: 150, + default: 250, }, testSuite: { free: 10000, @@ -81,16 +97,36 @@ export const testSuiteRateLimiter = new RateLimiterRedis({ duration: 60, // Duration in seconds }); +export const devBRateLimiter = new RateLimiterRedis({ + storeClient: redisRateLimitClient, + keyPrefix: "dev-b", + points: 1200, + duration: 60, // Duration in seconds +}); + + +export const scrapeStatusRateLimiter = new RateLimiterRedis({ + storeClient: redisRateLimitClient, + keyPrefix: "scrape-status", + points: 400, + duration: 60, // Duration in seconds +}); + export function getRateLimiter( mode: RateLimiterMode, token: string, - plan?: string + plan?: string, + teamId?: string ) { - if (token.includes("a01ccae") || token.includes("6254cf9")) { + if (token.includes("a01ccae") || token.includes("6254cf9") || token.includes("0f96e673") || token.includes("23befa1b")) { return testSuiteRateLimiter; } + if(teamId && teamId === process.env.DEV_B_TEAM_ID) { + return devBRateLimiter; + } + const rateLimitConfig = RATE_LIMITS[mode]; // {default : 5} if (!rateLimitConfig) return serverRateLimiter; diff --git a/apps/api/src/services/redlock.ts b/apps/api/src/services/redlock.ts new file mode 100644 index 00000000..9cbfc1fc --- /dev/null +++ b/apps/api/src/services/redlock.ts @@ -0,0 +1,29 @@ +import Redlock from "redlock"; +import Client from "ioredis"; + +export const redlock = new Redlock( + // You should have one client for each independent redis node + // or cluster. + [new Client(process.env.REDIS_RATE_LIMIT_URL)], + { + // The expected clock drift; for more details see: + // http://redis.io/topics/distlock + driftFactor: 0.01, // multiplied by lock ttl to determine drift time + + // The max number of times Redlock will attempt to lock a resource + // before erroring. + retryCount: 5, + + // the time in ms between attempts + retryDelay: 100, // time in ms + + // the max time in ms randomly added to retries + // to improve performance under high contention + // see https://www.awsarchitectureblog.com/2015/03/backoff.html + retryJitter: 200, // time in ms + + // The minimum remaining time on a lock before an extension is automatically + // attempted with the `using` API. 
+ automaticExtensionThreshold: 500, // time in ms + } +); diff --git a/apps/api/src/services/sentry.ts b/apps/api/src/services/sentry.ts new file mode 100644 index 00000000..176d3d4b --- /dev/null +++ b/apps/api/src/services/sentry.ts @@ -0,0 +1,18 @@ +// Import with `import * as Sentry from "@sentry/node"` if you are using ESM +import * as Sentry from "@sentry/node"; +import { nodeProfilingIntegration } from "@sentry/profiling-node"; +import { Logger } from "../lib/logger"; + +if (process.env.SENTRY_DSN) { + Logger.info("Setting up Sentry..."); + Sentry.init({ + dsn: process.env.SENTRY_DSN, + integrations: [ + nodeProfilingIntegration(), + ], + tracesSampleRate: process.env.SENTRY_ENVIRONMENT === "dev" ? 1.0 : 0.045, + profilesSampleRate: 1.0, + serverName: process.env.FLY_MACHINE_ID, + environment: process.env.SENTRY_ENVIRONMENT ?? "production", + }); +} diff --git a/apps/api/src/services/system-monitor.ts b/apps/api/src/services/system-monitor.ts new file mode 100644 index 00000000..c0574912 --- /dev/null +++ b/apps/api/src/services/system-monitor.ts @@ -0,0 +1,81 @@ +import si from 'systeminformation'; +import { Mutex } from "async-mutex"; + +const MAX_CPU = process.env.MAX_CPU ? parseFloat(process.env.MAX_CPU) : 0.8; +const MAX_RAM = process.env.MAX_RAM ? parseFloat(process.env.MAX_RAM) : 0.8; +const CACHE_DURATION = process.env.SYS_INFO_MAX_CACHE_DURATION ? parseFloat(process.env.SYS_INFO_MAX_CACHE_DURATION) : 150; + +class SystemMonitor { + private static instance: SystemMonitor; + private static instanceMutex = new Mutex(); + + private cpuUsageCache: number | null = null; + private memoryUsageCache: number | null = null; + private lastCpuCheck: number = 0; + private lastMemoryCheck: number = 0; + + private constructor() {} + + public static async getInstance(): Promise { + if (SystemMonitor.instance) { + return SystemMonitor.instance; + } + + await this.instanceMutex.runExclusive(async () => { + if (!SystemMonitor.instance) { + SystemMonitor.instance = new SystemMonitor(); + } + }); + + return SystemMonitor.instance; + } + + private async checkMemoryUsage() { + const now = Date.now(); + if (this.memoryUsageCache !== null && (now - this.lastMemoryCheck) < CACHE_DURATION) { + return this.memoryUsageCache; + } + + const memoryData = await si.mem(); + const totalMemory = memoryData.total; + const availableMemory = memoryData.available; + const usedMemory = totalMemory - availableMemory; + const usedMemoryPercentage = (usedMemory / totalMemory); + + this.memoryUsageCache = usedMemoryPercentage; + this.lastMemoryCheck = now; + + return usedMemoryPercentage; + } + + private async checkCpuUsage() { + const now = Date.now(); + if (this.cpuUsageCache !== null && (now - this.lastCpuCheck) < CACHE_DURATION) { + return this.cpuUsageCache; + } + + const cpuData = await si.currentLoad(); + const cpuLoad = cpuData.currentLoad / 100; + + this.cpuUsageCache = cpuLoad; + this.lastCpuCheck = now; + + return cpuLoad; + } + + public async acceptConnection() { + const cpuUsage = await this.checkCpuUsage(); + const memoryUsage = await this.checkMemoryUsage(); + + return cpuUsage < MAX_CPU && memoryUsage < MAX_RAM; + } + + public clearCache() { + this.cpuUsageCache = null; + this.memoryUsageCache = null; + this.lastCpuCheck = 0; + this.lastMemoryCheck = 0; + } +} + +export default SystemMonitor.getInstance(); \ No newline at end of file diff --git a/apps/api/src/services/webhook.ts b/apps/api/src/services/webhook.ts index b0222ea3..56dd5c58 100644 --- a/apps/api/src/services/webhook.ts +++ 
b/apps/api/src/services/webhook.ts @@ -1,22 +1,38 @@ +import axios from "axios"; +import { legacyDocumentConverter } from "../../src/controllers/v1/types"; import { Logger } from "../../src/lib/logger"; import { supabase_service } from "./supabase"; +import { WebhookEventType } from "../types"; -export const callWebhook = async (teamId: string, jobId: string,data: any) => { +export const callWebhook = async ( + teamId: string, + id: string, + data: any | null, + specified?: string, + v1 = false, + eventType: WebhookEventType = "crawl.page", + awaitWebhook: boolean = false +) => { try { - const selfHostedUrl = process.env.SELF_HOSTED_WEBHOOK_URL?.replace("{{JOB_ID}}", jobId); - const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === 'true'; - let webhookUrl = selfHostedUrl; + const selfHostedUrl = process.env.SELF_HOSTED_WEBHOOK_URL?.replace( + "{{JOB_ID}}", + id + ); + const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === "true"; + let webhookUrl = specified ?? selfHostedUrl; - // Only fetch the webhook URL from the database if the self-hosted webhook URL is not set + // Only fetch the webhook URL from the database if the self-hosted webhook URL and specified webhook are not set // and the USE_DB_AUTHENTICATION environment variable is set to true - if (!selfHostedUrl && useDbAuthentication) { + if (!webhookUrl && useDbAuthentication) { const { data: webhooksData, error } = await supabase_service .from("webhooks") .select("url") .eq("team_id", teamId) .limit(1); if (error) { - Logger.error(`Error fetching webhook URL for team ID: ${teamId}, error: ${error.message}`); + Logger.error( + `Error fetching webhook URL for team ID: ${teamId}, error: ${error.message}` + ); return null; } @@ -28,29 +44,93 @@ export const callWebhook = async (teamId: string, jobId: string,data: any) => { } let dataToSend = []; - if (data.result.links && data.result.links.length !== 0) { + if ( + data && + data.result && + data.result.links && + data.result.links.length !== 0 + ) { for (let i = 0; i < data.result.links.length; i++) { - dataToSend.push({ - content: data.result.links[i].content.content, - markdown: data.result.links[i].content.markdown, - metadata: data.result.links[i].content.metadata, - }); + if (v1) { + dataToSend.push( + legacyDocumentConverter(data.result.links[i].content) + ); + } else { + dataToSend.push({ + content: data.result.links[i].content.content, + markdown: data.result.links[i].content.markdown, + metadata: data.result.links[i].content.metadata, + }); + } } } - await fetch(webhookUrl, { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - success: data.success, - jobId: jobId, - data: dataToSend, - error: data.error || undefined, - }), - }); + if (awaitWebhook) { + try { + await axios.post( + webhookUrl, + { + success: !v1 + ? data.success + : eventType === "crawl.page" + ? data.success + : true, + type: eventType, + [v1 ? "id" : "jobId"]: id, + data: dataToSend, + error: !v1 + ? data?.error || undefined + : eventType === "crawl.page" + ? data?.error || undefined + : undefined, + }, + { + headers: { + "Content-Type": "application/json", + }, + timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1) + } + ); + } catch (error) { + Logger.error( + `Axios error (0) sending webhook for team ID: ${teamId}, error: ${error.message}` + ); + } + } else { + axios + .post( + webhookUrl, + { + success: !v1 + ? data.success + : eventType === "crawl.page" + ? data.success + : true, + type: eventType, + [v1 ? 
"id" : "jobId"]: id, + data: dataToSend, + error: !v1 + ? data?.error || undefined + : eventType === "crawl.page" + ? data?.error || undefined + : undefined, + }, + { + headers: { + "Content-Type": "application/json", + }, + timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1) + } + ) + .catch((error) => { + Logger.error( + `Axios error sending webhook for team ID: ${teamId}, error: ${error.message}` + ); + }); + } } catch (error) { - Logger.debug(`Error sending webhook for team ID: ${teamId}, error: ${error.message}`); + Logger.debug( + `Error sending webhook for team ID: ${teamId}, error: ${error.message}` + ); } }; diff --git a/apps/api/src/types.ts b/apps/api/src/types.ts index cef49f2f..50fb6eef 100644 --- a/apps/api/src/types.ts +++ b/apps/api/src/types.ts @@ -25,8 +25,14 @@ export interface WebScraperOptions { mode: Mode; crawlerOptions: any; pageOptions: any; + extractorOptions?: any; team_id: string; origin?: string; + crawl_id?: string; + sitemapped?: boolean; + webhook?: string; + v1?: boolean; + is_scrape?: boolean; } export interface RunWebScraperParams { @@ -34,11 +40,14 @@ export interface RunWebScraperParams { mode: Mode; crawlerOptions: any; pageOptions?: any; + extractorOptions?: any; inProgress: (progress: any) => void; - onSuccess: (result: any) => void; + onSuccess: (result: any, mode: string) => void; onError: (error: Error) => void; team_id: string; bull_job_id: string; + priority?: number; + is_scrape?: boolean; } export interface RunWebScraperResult { @@ -63,6 +72,7 @@ export interface FirecrawlJob { extractor_options?: ExtractorOptions, num_tokens?: number, retry?: boolean, + crawl_id?: string; } export interface FirecrawlScrapeResponse { @@ -99,6 +109,7 @@ export enum RateLimiterMode { Scrape = "scrape", Preview = "preview", Search = "search", + Map = "map", } @@ -107,7 +118,8 @@ export interface AuthResponse { team_id?: string; error?: string; status?: number; - plan?: string; + api_key?: string; + plan?: PlanType; } @@ -130,4 +142,18 @@ export type ScrapeLog = { html?: string; ipv4_support?: boolean | null; ipv6_support?: boolean | null; -}; \ No newline at end of file +}; + +export type PlanType = + | "starter" + | "standard" + | "scale" + | "hobby" + | "standardnew" + | "growth" + | "growthdouble" + | "free" + | ""; + + +export type WebhookEventType = "crawl.page" | "crawl.started" | "crawl.completed" | "crawl.failed"; \ No newline at end of file diff --git a/apps/api/tsconfig.json b/apps/api/tsconfig.json index 84007570..dd7f0ed2 100644 --- a/apps/api/tsconfig.json +++ b/apps/api/tsconfig.json @@ -2,16 +2,22 @@ "compilerOptions": { "rootDir": "./src", "lib": ["es6","DOM"], - "target": "ES2020", // or higher + + // or higher + "target": "ES2020", + "module": "commonjs", "esModuleInterop": true, "sourceMap": true, "outDir": "./dist/src", "moduleResolution": "node", "baseUrl": ".", + "paths": { "*": ["node_modules/*", "src/types/*"], - } + }, + + "inlineSources": true }, "include": ["src/","src/**/*", "services/db/supabase.ts", "utils/utils.ts", "services/db/supabaseEmbeddings.ts", "utils/EventEmmitter.ts", "src/services/queue-service.ts"] } diff --git a/apps/go-sdk/README.md b/apps/go-sdk/README.md deleted file mode 100644 index 353d28d6..00000000 --- a/apps/go-sdk/README.md +++ /dev/null @@ -1,189 +0,0 @@ -# Firecrawl Go SDK - -The Firecrawl Go SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). 
It provides a simple and intuitive interface for interacting with the Firecrawl API. - -## Installation - -To install the Firecrawl Go SDK, you can - -```bash -go get github.com/mendableai/firecrawl/go-sdk/firecrawl -``` - -## Usage - -1. Get an API key from [firecrawl.dev](https://firecrawl.dev) -2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` class. - - -Here's an example of how to use the SDK with error handling: - -```go -import ( - "fmt" - "log" - - "github.com/mendableai/firecrawl/go-sdk/firecrawl" -) - -func main() { - // Initialize the FirecrawlApp with your API key - app, err := firecrawl.NewFirecrawlApp("YOUR_API_KEY") - if err != nil { - log.Fatalf("Failed to initialize FirecrawlApp: %v", err) - } - - // Scrape a single URL - url := "https://mendable.ai" - scrapedData, err := app.ScrapeURL(url, nil) - if err != nil { - log.Fatalf("Error occurred while scraping: %v", err) - } - fmt.Println(scrapedData) - - // Crawl a website - crawlUrl := "https://mendable.ai" - params := map[string]any{ - "pageOptions": map[string]any{ - "onlyMainContent": true, - }, - } - - crawlResult, err := app.CrawlURL(crawlUrl, params) - if err != nil { - log.Fatalf("Error occurred while crawling: %v", err) - } - fmt.Println(crawlResult) -} -``` - -### Scraping a URL - -To scrape a single URL with error handling, use the `ScrapeURL` method. It takes the URL as a parameter and returns the scraped data as a dictionary. - -```go -url := "https://mendable.ai" -scrapedData, err := app.ScrapeURL(url, nil) -if err != nil { - log.Fatalf("Failed to scrape URL: %v", err) -} -fmt.Println(scrapedData) -``` - -### Extracting structured data from a URL - -With LLM extraction, you can easily extract structured data from any URL. Here is how you to use it: - -```go -jsonSchema := map[string]any{ - "type": "object", - "properties": map[string]any{ - "top": map[string]any{ - "type": "array", - "items": map[string]any{ - "type": "object", - "properties": map[string]any{ - "title": map[string]string{"type": "string"}, - "points": map[string]string{"type": "number"}, - "by": map[string]string{"type": "string"}, - "commentsURL": map[string]string{"type": "string"}, - }, - "required": []string{"title", "points", "by", "commentsURL"}, - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News", - }, - }, - "required": []string{"top"}, -} - -llmExtractionParams := map[string]any{ - "extractorOptions": firecrawl.ExtractorOptions{ - ExtractionSchema: jsonSchema, - }, -} - -scrapeResult, err := app.ScrapeURL("https://news.ycombinator.com", llmExtractionParams) -if err != nil { - log.Fatalf("Failed to perform LLM extraction: %v", err) -} -fmt.Println(scrapeResult) -``` - -### Search for a query - -To search the web, get the most relevant results, scrap each page and return the markdown, use the `Search` method. The method takes the query as a parameter and returns the search results. - - -```go -query := "what is mendable?" -searchResult, err := app.Search(query) -if err != nil { - log.Fatalf("Failed to search: %v", err) -} -fmt.Println(searchResult) -``` - -### Crawling a Website - -To crawl a website, use the `CrawlUrl` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. 
- -```go -crawlParams := map[string]any{ - "crawlerOptions": map[string]any{ - "excludes": []string{"blog/*"}, - "includes": []string{}, // leave empty for all pages - "limit": 1000, - }, - "pageOptions": map[string]any{ - "onlyMainContent": true, - }, -} -crawlResult, err := app.CrawlURL("mendable.ai", crawlParams, true, 2, idempotencyKey) -if err != nil { - log.Fatalf("Failed to crawl URL: %v", err) -} -fmt.Println(crawlResult) -``` - -### Checking Crawl Status - -To check the status of a crawl job, use the `CheckCrawlStatus` method. It takes the job ID as a parameter and returns the current status of the crawl job. - -```go -status, err := app.CheckCrawlStatus(jobId) -if err != nil { - log.Fatalf("Failed to check crawl status: %v", err) -} -fmt.Println(status) -``` - -### Canceling a Crawl Job -To cancel a crawl job, use the `CancelCrawlJob` method. It takes the job ID as a parameter and returns the cancellation status of the crawl job. - -```go -canceled, err := app.CancelCrawlJob(jobId) -if err != nil { - log.Fatalf("Failed to cancel crawl job: %v", err) -} -fmt.Println(canceled) -``` - -## Error Handling - -The SDK handles errors returned by the Firecrawl API and raises appropriate exceptions. If an error occurs during a request, an exception will be raised with a descriptive error message. - -## Contributing - -Contributions to the Firecrawl Go SDK are welcome! If you find any issues or have suggestions for improvements, please open an issue or submit a pull request on the GitHub repository. - -## License - -The Firecrawl Go SDK is licensed under the MIT License. This means you are free to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the SDK, subject to the following conditions: - -- The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -Please note that while this SDK is MIT licensed, it is part of a larger project which may be under different licensing terms. Always refer to the license information in the root directory of the main project for overall licensing details. 
diff --git a/apps/go-sdk/examples/example.go b/apps/go-sdk/examples/example.go deleted file mode 100644 index 75194965..00000000 --- a/apps/go-sdk/examples/example.go +++ /dev/null @@ -1,87 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "log" - - "github.com/google/uuid" - "github.com/mendableai/firecrawl/go-sdk/firecrawl" -) - -func main() { - app, err := firecrawl.NewFirecrawlApp("fc-YOUR-API-KEY", "http://localhost:3002") - if err != nil { - log.Fatalf("Failed to create FirecrawlApp: %v", err) - } - - // Scrape a website - scrapeResult, err := app.ScrapeURL("firecrawl.dev", nil) - if err != nil { - log.Fatalf("Failed to scrape URL: %v", err) - } - fmt.Println(scrapeResult.Markdown) - - // Crawl a website - idempotencyKey := uuid.New().String() // optional idempotency key - crawlParams := map[string]any{ - "crawlerOptions": map[string]any{ - "excludes": []string{"blog/*"}, - }, - } - crawlResult, err := app.CrawlURL("mendable.ai", crawlParams, true, 2, idempotencyKey) - if err != nil { - log.Fatalf("Failed to crawl URL: %v", err) - } - jsonCrawlResult, err := json.MarshalIndent(crawlResult, "", " ") - if err != nil { - log.Fatalf("Failed to marshal crawl result: %v", err) - } - fmt.Println(string(jsonCrawlResult)) - - // LLM Extraction using JSON schema - jsonSchema := map[string]any{ - "type": "object", - "properties": map[string]any{ - "top": map[string]any{ - "type": "array", - "items": map[string]any{ - "type": "object", - "properties": map[string]any{ - "title": map[string]string{"type": "string"}, - "points": map[string]string{"type": "number"}, - "by": map[string]string{"type": "string"}, - "commentsURL": map[string]string{"type": "string"}, - }, - "required": []string{"title", "points", "by", "commentsURL"}, - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News", - }, - }, - "required": []string{"top"}, - } - - llmExtractionParams := map[string]any{ - "extractorOptions": firecrawl.ExtractorOptions{ - ExtractionSchema: jsonSchema, - Mode: "llm-extraction", - }, - "pageOptions": map[string]any{ - "onlyMainContent": true, - }, - } - - llmExtractionResult, err := app.ScrapeURL("https://news.ycombinator.com", llmExtractionParams) - if err != nil { - log.Fatalf("Failed to perform LLM extraction: %v", err) - } - - // Pretty print the LLM extraction result - jsonResult, err := json.MarshalIndent(llmExtractionResult.LLMExtraction, "", " ") - if err != nil { - log.Fatalf("Failed to marshal LLM extraction result: %v", err) - } - fmt.Println(string(jsonResult)) -} diff --git a/apps/go-sdk/examples/go.mod b/apps/go-sdk/examples/go.mod deleted file mode 100644 index e3c5335d..00000000 --- a/apps/go-sdk/examples/go.mod +++ /dev/null @@ -1,10 +0,0 @@ -module github.com/mendableai/firecrawl/go-sdk/examples - -go 1.22.5 - -replace github.com/mendableai/firecrawl/go-sdk => ../ - -require ( - github.com/google/uuid v1.6.0 - github.com/mendableai/firecrawl/go-sdk v0.0.0-00010101000000-000000000000 -) diff --git a/apps/go-sdk/examples/go.sum b/apps/go-sdk/examples/go.sum deleted file mode 100644 index e724cfb0..00000000 --- a/apps/go-sdk/examples/go.sum +++ /dev/null @@ -1,12 +0,0 @@ -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/joho/godotenv v1.5.1 
h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= -github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/apps/go-sdk/firecrawl/firecrawl.go b/apps/go-sdk/firecrawl/firecrawl.go deleted file mode 100644 index 9a9dcfef..00000000 --- a/apps/go-sdk/firecrawl/firecrawl.go +++ /dev/null @@ -1,584 +0,0 @@ -// Package firecrawl provides a client for interacting with the Firecrawl API. -package firecrawl - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "math" - "net/http" - "os" - "time" -) - -// FirecrawlDocumentMetadata represents metadata for a Firecrawl document -type FirecrawlDocumentMetadata struct { - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Language string `json:"language,omitempty"` - Keywords string `json:"keywords,omitempty"` - Robots string `json:"robots,omitempty"` - OGTitle string `json:"ogTitle,omitempty"` - OGDescription string `json:"ogDescription,omitempty"` - OGURL string `json:"ogUrl,omitempty"` - OGImage string `json:"ogImage,omitempty"` - OGAudio string `json:"ogAudio,omitempty"` - OGDeterminer string `json:"ogDeterminer,omitempty"` - OGLocale string `json:"ogLocale,omitempty"` - OGLocaleAlternate []string `json:"ogLocaleAlternate,omitempty"` - OGSiteName string `json:"ogSiteName,omitempty"` - OGVideo string `json:"ogVideo,omitempty"` - DCTermsCreated string `json:"dctermsCreated,omitempty"` - DCDateCreated string `json:"dcDateCreated,omitempty"` - DCDate string `json:"dcDate,omitempty"` - DCTermsType string `json:"dctermsType,omitempty"` - DCType string `json:"dcType,omitempty"` - DCTermsAudience string `json:"dctermsAudience,omitempty"` - DCTermsSubject string `json:"dctermsSubject,omitempty"` - DCSubject string `json:"dcSubject,omitempty"` - DCDescription string `json:"dcDescription,omitempty"` - DCTermsKeywords string `json:"dctermsKeywords,omitempty"` - ModifiedTime string `json:"modifiedTime,omitempty"` - PublishedTime string `json:"publishedTime,omitempty"` - ArticleTag string `json:"articleTag,omitempty"` - ArticleSection string `json:"articleSection,omitempty"` - SourceURL string `json:"sourceURL,omitempty"` - PageStatusCode int `json:"pageStatusCode,omitempty"` - PageError string `json:"pageError,omitempty"` -} - -// FirecrawlDocument represents a document in Firecrawl -type FirecrawlDocument struct { - ID string `json:"id,omitempty"` - URL string `json:"url,omitempty"` - Content string `json:"content"` - Markdown string `json:"markdown,omitempty"` - HTML string `json:"html,omitempty"` - LLMExtraction map[string]any `json:"llm_extraction,omitempty"` - CreatedAt *time.Time `json:"createdAt,omitempty"` - UpdatedAt *time.Time `json:"updatedAt,omitempty"` - Type string `json:"type,omitempty"` - Metadata *FirecrawlDocumentMetadata `json:"metadata,omitempty"` - ChildrenLinks []string `json:"childrenLinks,omitempty"` - Provider string `json:"provider,omitempty"` - Warning string `json:"warning,omitempty"` - Index int `json:"index,omitempty"` -} - -// ExtractorOptions 
represents options for extraction. -type ExtractorOptions struct { - Mode string `json:"mode,omitempty"` - ExtractionPrompt string `json:"extractionPrompt,omitempty"` - ExtractionSchema any `json:"extractionSchema,omitempty"` -} - -// ScrapeResponse represents the response for scraping operations -type ScrapeResponse struct { - Success bool `json:"success"` - Data *FirecrawlDocument `json:"data,omitempty"` -} - -// SearchResponse represents the response for searching operations -type SearchResponse struct { - Success bool `json:"success"` - Data []*FirecrawlDocument `json:"data,omitempty"` -} - -// CrawlResponse represents the response for crawling operations -type CrawlResponse struct { - Success bool `json:"success"` - JobID string `json:"jobId,omitempty"` - Data []*FirecrawlDocument `json:"data,omitempty"` -} - -// JobStatusResponse represents the response for checking crawl job status -type JobStatusResponse struct { - Success bool `json:"success"` - Status string `json:"status"` - Current int `json:"current,omitempty"` - CurrentURL string `json:"current_url,omitempty"` - CurrentStep string `json:"current_step,omitempty"` - Total int `json:"total,omitempty"` - JobID string `json:"jobId,omitempty"` - Data []*FirecrawlDocument `json:"data,omitempty"` - PartialData []*FirecrawlDocument `json:"partial_data,omitempty"` -} - -// CancelCrawlJobResponse represents the response for canceling a crawl job -type CancelCrawlJobResponse struct { - Success bool `json:"success"` - Status string `json:"status"` -} - -// requestOptions represents options for making requests. -type requestOptions struct { - retries int - backoff int -} - -// requestOption is a functional option type for requestOptions. -type requestOption func(*requestOptions) - -// newRequestOptions creates a new requestOptions instance with the provided options. -// -// Parameters: -// - opts: Optional request options. -// -// Returns: -// - *requestOptions: A new instance of requestOptions with the provided options. -func newRequestOptions(opts ...requestOption) *requestOptions { - options := &requestOptions{retries: 1} - for _, opt := range opts { - opt(options) - } - return options -} - -// withRetries sets the number of retries for a request. -// -// Parameters: -// - retries: The number of retries to be performed. -// -// Returns: -// - requestOption: A functional option that sets the number of retries for a request. -func withRetries(retries int) requestOption { - return func(opts *requestOptions) { - opts.retries = retries - } -} - -// withBackoff sets the backoff interval for a request. -// -// Parameters: -// - backoff: The backoff interval (in milliseconds) to be used for retries. -// -// Returns: -// - requestOption: A functional option that sets the backoff interval for a request. -func withBackoff(backoff int) requestOption { - return func(opts *requestOptions) { - opts.backoff = backoff - } -} - -// FirecrawlApp represents a client for the Firecrawl API. -type FirecrawlApp struct { - APIKey string - APIURL string - Client *http.Client -} - -// NewFirecrawlApp creates a new instance of FirecrawlApp with the provided API key and API URL. -// If the API key or API URL is not provided, it attempts to retrieve them from environment variables. -// If the API key is still not found, it returns an error. -// -// Parameters: -// - apiKey: The API key for authenticating with the Firecrawl API. If empty, it will be retrieved from the FIRECRAWL_API_KEY environment variable. -// - apiURL: The base URL for the Firecrawl API. 
If empty, it will be retrieved from the FIRECRAWL_API_URL environment variable, defaulting to "https://api.firecrawl.dev". -// -// Returns: -// - *FirecrawlApp: A new instance of FirecrawlApp configured with the provided or retrieved API key and API URL. -// - error: An error if the API key is not provided or retrieved. -func NewFirecrawlApp(apiKey, apiURL string) (*FirecrawlApp, error) { - if apiKey == "" { - apiKey = os.Getenv("FIRECRAWL_API_KEY") - if apiKey == "" { - return nil, fmt.Errorf("no API key provided") - } - } - - if apiURL == "" { - apiURL = os.Getenv("FIRECRAWL_API_URL") - if apiURL == "" { - apiURL = "https://api.firecrawl.dev" - } - } - - client := &http.Client{ - Timeout: 60 * time.Second, - } - - return &FirecrawlApp{ - APIKey: apiKey, - APIURL: apiURL, - Client: client, - }, nil -} - -// ScrapeURL scrapes the content of the specified URL using the Firecrawl API. -// -// Parameters: -// - url: The URL to be scraped. -// - params: Optional parameters for the scrape request, including extractor options for LLM extraction. -// -// Returns: -// - *FirecrawlDocument: The scraped document data. -// - error: An error if the scrape request fails. -func (app *FirecrawlApp) ScrapeURL(url string, params map[string]any) (*FirecrawlDocument, error) { - headers := app.prepareHeaders("") - scrapeBody := map[string]any{"url": url} - - if params != nil { - if extractorOptions, ok := params["extractorOptions"].(ExtractorOptions); ok { - if schema, ok := extractorOptions.ExtractionSchema.(interface{ schema() any }); ok { - extractorOptions.ExtractionSchema = schema.schema() - } - if extractorOptions.Mode == "" { - extractorOptions.Mode = "llm-extraction" - } - scrapeBody["extractorOptions"] = extractorOptions - } - - for key, value := range params { - if key != "extractorOptions" { - scrapeBody[key] = value - } - } - } - - resp, err := app.makeRequest( - http.MethodPost, - fmt.Sprintf("%s/v0/scrape", app.APIURL), - scrapeBody, - headers, - "scrape URL", - ) - if err != nil { - return nil, err - } - - var scrapeResponse ScrapeResponse - err = json.Unmarshal(resp, &scrapeResponse) - if err != nil { - return nil, err - } - - if scrapeResponse.Success { - return scrapeResponse.Data, nil - } - - return nil, fmt.Errorf("failed to scrape URL") -} - -// Search performs a search query using the Firecrawl API and returns the search results. -// -// Parameters: -// - query: The search query string. -// - params: Optional parameters for the search request. -// -// Returns: -// - []*FirecrawlDocument: A slice of FirecrawlDocument containing the search results. -// - error: An error if the search request fails. -func (app *FirecrawlApp) Search(query string, params map[string]any) ([]*FirecrawlDocument, error) { - headers := app.prepareHeaders("") - searchBody := map[string]any{"query": query} - for k, v := range params { - searchBody[k] = v - } - - resp, err := app.makeRequest( - http.MethodPost, - fmt.Sprintf("%s/v0/search", app.APIURL), - searchBody, - headers, - "search", - ) - if err != nil { - return nil, err - } - - var searchResponse SearchResponse - err = json.Unmarshal(resp, &searchResponse) - if err != nil { - return nil, err - } - - if searchResponse.Success { - return searchResponse.Data, nil - } - - return nil, fmt.Errorf("failed to search") -} - -// CrawlURL starts a crawl job for the specified URL using the Firecrawl API. -// -// Parameters: -// - url: The URL to crawl. -// - params: Optional parameters for the crawl request. 
-// - waitUntilDone: If true, the method will wait until the crawl job is completed before returning. -// - pollInterval: The interval (in seconds) at which to poll the job status if waitUntilDone is true. -// - idempotencyKey: An optional idempotency key to ensure the request is idempotent. -// -// Returns: -// - any: The job ID if waitUntilDone is false, or the crawl result if waitUntilDone is true. -// - error: An error if the crawl request fails. -func (app *FirecrawlApp) CrawlURL(url string, params map[string]any, waitUntilDone bool, pollInterval int, idempotencyKey string) (any, error) { - headers := app.prepareHeaders(idempotencyKey) - crawlBody := map[string]any{"url": url} - for k, v := range params { - crawlBody[k] = v - } - - resp, err := app.makeRequest( - http.MethodPost, - fmt.Sprintf("%s/v0/crawl", app.APIURL), - crawlBody, - headers, - "start crawl job", - withRetries(3), - withBackoff(500), - ) - if err != nil { - return nil, err - } - - var crawlResponse CrawlResponse - err = json.Unmarshal(resp, &crawlResponse) - if err != nil { - return nil, err - } - - if waitUntilDone { - return app.monitorJobStatus(crawlResponse.JobID, headers, pollInterval) - } - - if crawlResponse.JobID == "" { - return nil, fmt.Errorf("failed to get job ID") - } - - return crawlResponse.JobID, nil -} - -// CheckCrawlStatus checks the status of a crawl job using the Firecrawl API. -// -// Parameters: -// - jobID: The ID of the crawl job to check. -// -// Returns: -// - *JobStatusResponse: The status of the crawl job. -// - error: An error if the crawl status check request fails. -func (app *FirecrawlApp) CheckCrawlStatus(jobID string) (*JobStatusResponse, error) { - headers := app.prepareHeaders("") - resp, err := app.makeRequest( - http.MethodGet, - fmt.Sprintf("%s/v0/crawl/status/%s", app.APIURL, jobID), - nil, - headers, - "check crawl status", - withRetries(3), - withBackoff(500), - ) - if err != nil { - return nil, err - } - - var jobStatusResponse JobStatusResponse - err = json.Unmarshal(resp, &jobStatusResponse) - if err != nil { - return nil, err - } - - return &jobStatusResponse, nil -} - -// CancelCrawlJob cancels a crawl job using the Firecrawl API. -// -// Parameters: -// - jobID: The ID of the crawl job to cancel. -// -// Returns: -// - string: The status of the crawl job after cancellation. -// - error: An error if the crawl job cancellation request fails. -func (app *FirecrawlApp) CancelCrawlJob(jobID string) (string, error) { - headers := app.prepareHeaders("") - resp, err := app.makeRequest( - http.MethodDelete, - fmt.Sprintf("%s/v0/crawl/cancel/%s", app.APIURL, jobID), - nil, - headers, - "cancel crawl job", - ) - if err != nil { - return "", err - } - - var cancelCrawlJobResponse CancelCrawlJobResponse - err = json.Unmarshal(resp, &cancelCrawlJobResponse) - if err != nil { - return "", err - } - - return cancelCrawlJobResponse.Status, nil -} - -// prepareHeaders prepares the headers for an HTTP request. -// -// Parameters: -// - idempotencyKey: A string representing the idempotency key to be included in the headers. -// If the idempotency key is an empty string, it will not be included in the headers. -// -// Returns: -// - map[string]string: A map containing the headers for the HTTP request. 
-func (app *FirecrawlApp) prepareHeaders(idempotencyKey string) map[string]string { - headers := map[string]string{ - "Content-Type": "application/json", - "Authorization": fmt.Sprintf("Bearer %s", app.APIKey), - } - if idempotencyKey != "" { - headers["x-idempotency-key"] = idempotencyKey - } - return headers -} - -// makeRequest makes a request to the specified URL with the provided method, data, headers, and options. -// -// Parameters: -// - method: The HTTP method to use for the request (e.g., "GET", "POST", "DELETE"). -// - url: The URL to send the request to. -// - data: The data to be sent in the request body. -// - headers: The headers to be included in the request. -// - action: A string describing the action being performed. -// - opts: Optional request options. -// -// Returns: -// - []byte: The response body from the request. -// - error: An error if the request fails. -func (app *FirecrawlApp) makeRequest(method, url string, data map[string]any, headers map[string]string, action string, opts ...requestOption) ([]byte, error) { - var body []byte - var err error - if data != nil { - body, err = json.Marshal(data) - if err != nil { - return nil, err - } - } - - req, err := http.NewRequest(method, url, bytes.NewBuffer(body)) - if err != nil { - return nil, err - } - - for key, value := range headers { - req.Header.Set(key, value) - } - - var resp *http.Response - options := newRequestOptions(opts...) - for i := 0; i < options.retries; i++ { - resp, err = app.Client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != 502 { - break - } - - time.Sleep(time.Duration(math.Pow(2, float64(i))) * time.Duration(options.backoff) * time.Millisecond) - } - - respBody, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - statusCode := resp.StatusCode - if statusCode != 200 { - return nil, app.handleError(statusCode, respBody, action) - } - - return respBody, nil -} - -// monitorJobStatus monitors the status of a crawl job using the Firecrawl API. -// -// Parameters: -// - jobID: The ID of the crawl job to monitor. -// - headers: The headers to be included in the request. -// - pollInterval: The interval (in seconds) at which to poll the job status. -// -// Returns: -// - []*FirecrawlDocument: The crawl result if the job is completed. -// - error: An error if the crawl status check request fails. -func (app *FirecrawlApp) monitorJobStatus(jobID string, headers map[string]string, pollInterval int) ([]*FirecrawlDocument, error) { - attempts := 0 - for { - resp, err := app.makeRequest( - http.MethodGet, - fmt.Sprintf("%s/v0/crawl/status/%s", app.APIURL, jobID), - nil, - headers, - "check crawl status", - withRetries(3), - withBackoff(500), - ) - if err != nil { - return nil, err - } - - var statusData JobStatusResponse - err = json.Unmarshal(resp, &statusData) - if err != nil { - return nil, err - } - - status := statusData.Status - if status == "" { - return nil, fmt.Errorf("invalid status in response") - } - - if status == "completed" { - if statusData.Data != nil { - return statusData.Data, nil - } - attempts++ - if attempts > 3 { - return nil, fmt.Errorf("crawl job completed but no data was returned") - } - } else if status == "active" || status == "paused" || status == "pending" || status == "queued" || status == "waiting" { - pollInterval = max(pollInterval, 2) - time.Sleep(time.Duration(pollInterval) * time.Second) - } else { - return nil, fmt.Errorf("crawl job failed or was stopped. 
Status: %s", status) - } - } -} - -// handleError handles errors returned by the Firecrawl API. -// -// Parameters: -// - resp: The HTTP response object. -// - body: The response body from the HTTP response. -// - action: A string describing the action being performed. -// -// Returns: -// - error: An error describing the failure reason. -func (app *FirecrawlApp) handleError(statusCode int, body []byte, action string) error { - var errorData map[string]any - err := json.Unmarshal(body, &errorData) - if err != nil { - return fmt.Errorf("failed to parse error response: %v", err) - } - - errorMessage, _ := errorData["error"].(string) - if errorMessage == "" { - errorMessage = "No additional error details provided." - } - - var message string - switch statusCode { - case 402: - message = fmt.Sprintf("Payment Required: Failed to %s. %s", action, errorMessage) - case 408: - message = fmt.Sprintf("Request Timeout: Failed to %s as the request timed out. %s", action, errorMessage) - case 409: - message = fmt.Sprintf("Conflict: Failed to %s due to a conflict. %s", action, errorMessage) - case 500: - message = fmt.Sprintf("Internal Server Error: Failed to %s. %s", action, errorMessage) - default: - message = fmt.Sprintf("Unexpected error during %s: Status code %d. %s", action, statusCode, errorMessage) - } - - return fmt.Errorf(message) -} diff --git a/apps/go-sdk/firecrawl/firecrawl_test.go b/apps/go-sdk/firecrawl/firecrawl_test.go deleted file mode 100644 index 9d56c7ac..00000000 --- a/apps/go-sdk/firecrawl/firecrawl_test.go +++ /dev/null @@ -1,292 +0,0 @@ -package firecrawl - -import ( - "log" - "os" - "testing" - "time" - - "github.com/google/uuid" - "github.com/joho/godotenv" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -var API_URL string -var TEST_API_KEY string - -func init() { - err := godotenv.Load("../.env") - if err != nil { - log.Fatalf("Error loading .env file: %v", err) - } - API_URL = os.Getenv("API_URL") - TEST_API_KEY = os.Getenv("TEST_API_KEY") -} - -func TestNoAPIKey(t *testing.T) { - _, err := NewFirecrawlApp("", API_URL) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no API key provided") -} - -func TestScrapeURLInvalidAPIKey(t *testing.T) { - app, err := NewFirecrawlApp("invalid_api_key", API_URL) - require.NoError(t, err) - - _, err = app.ScrapeURL("https://firecrawl.dev", nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Unexpected error during scrape URL: Status code 401. Unauthorized: Invalid token") -} - -func TestBlocklistedURL(t *testing.T) { - app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) - require.NoError(t, err) - - _, err = app.ScrapeURL("https://facebook.com/fake-test", nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Unexpected error during scrape URL: Status code 403. 
Firecrawl currently does not support social media scraping due to policy restrictions.") -} - -func TestSuccessfulResponseWithValidPreviewToken(t *testing.T) { - app, err := NewFirecrawlApp("this_is_just_a_preview_token", API_URL) - require.NoError(t, err) - - response, err := app.ScrapeURL("https://roastmywebsite.ai", nil) - require.NoError(t, err) - assert.NotNil(t, response) - - assert.Contains(t, response.Content, "_Roast_") -} - -func TestScrapeURLE2E(t *testing.T) { - app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) - require.NoError(t, err) - - response, err := app.ScrapeURL("https://roastmywebsite.ai", nil) - require.NoError(t, err) - assert.NotNil(t, response) - - assert.Contains(t, response.Content, "_Roast_") - assert.NotEqual(t, response.Markdown, "") - assert.NotNil(t, response.Metadata) - assert.Equal(t, response.HTML, "") -} - -func TestSuccessfulResponseWithValidAPIKeyAndIncludeHTML(t *testing.T) { - app, err := NewFirecrawlApp(TEST_API_KEY, API_URL) - require.NoError(t, err) - - params := map[string]any{ - "pageOptions": map[string]any{ - "includeHtml": true, - }, - } - response, err := app.ScrapeURL("https://roastmywebsite.ai", params) - require.NoError(t, err) - assert.NotNil(t, response) - - assert.Contains(t, response.Content, "_Roast_") - assert.Contains(t, response.Markdown, "_Roast_") - assert.Contains(t, response.HTML, " { -// Crawl a website: -const idempotencyKey = uuidv4(); // optional -const crawlResult = await app.crawlUrl('mendable.ai', {crawlerOptions: {excludes: ['blog/*'], limit: 5}}, false, 2, idempotencyKey); -console.log(crawlResult) + // Scrape a website: + const scrapeResult = await app.scrapeUrl('firecrawl.dev'); -const jobId = await crawlResult['jobId']; -console.log(jobId); - -let job; -while (true) { - job = await app.checkCrawlStatus(jobId); - if (job.status == 'completed') { - break; + if (scrapeResult.success) { + console.log(scrapeResult.markdown) } - await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second -} -console.log(job.data[0].content); + // Crawl a website: + const crawlResult = await app.crawlUrl('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + console.log(crawlResult); -// Search for a query: -const query = 'what is mendable?' 
-const searchResult = await app.search(query) -console.log(searchResult) + // Asynchronously crawl a website: + const asyncCrawlResult = await app.asyncCrawlUrl('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + + if (asyncCrawlResult.success) { + const id = asyncCrawlResult.id; + console.log(id); -// LLM Extraction: -// Define schema to extract contents into using zod schema -const zodSchema = z.object({ - top: z - .array( - z.object({ - title: z.string(), - points: z.number(), - by: z.string(), - commentsURL: z.string(), - }) - ) - .length(5) - .describe("Top 5 stories on Hacker News"), -}); + let checkStatus; + if (asyncCrawlResult.success) { + while (true) { + checkStatus = await app.checkCrawlStatus(id); + if (checkStatus.success && checkStatus.status === 'completed') { + break; + } + await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second + } -let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: zodSchema }, -}); - -console.log(llmExtractionResult.data.llm_extraction); - -// Define schema to extract contents into using json schema -const jsonSchema = { - "type": "object", - "properties": { - "top": { - "type": "array", - "items": { - "type": "object", - "properties": { - "title": {"type": "string"}, - "points": {"type": "number"}, - "by": {"type": "string"}, - "commentsURL": {"type": "string"} - }, - "required": ["title", "points", "by", "commentsURL"] - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News" + if (checkStatus.success && checkStatus.data) { + console.log(checkStatus.data[0].markdown); + } } - }, - "required": ["top"] + } + + // Map a website: + const mapResult = await app.mapUrl('https://firecrawl.dev'); + console.log(mapResult) + + + // Crawl a website with WebSockets: + const watch = await app.crawlUrlAndWatch('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + + watch.addEventListener("document", doc => { + console.log("DOC", doc.detail); + }); + + watch.addEventListener("error", err => { + console.error("ERR", err.detail.error); + }); + + watch.addEventListener("done", state => { + console.log("DONE", state.detail.status); + }); } -llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: jsonSchema }, -}); - -console.log(llmExtractionResult.data.llm_extraction); \ No newline at end of file +main() diff --git a/apps/js-sdk/example.ts b/apps/js-sdk/example.ts index f314c080..4142416f 100644 --- a/apps/js-sdk/example.ts +++ b/apps/js-sdk/example.ts @@ -1,92 +1,61 @@ -import FirecrawlApp, { JobStatusResponse } from './firecrawl/src/index' //'@mendable/firecrawl-js'; -import { z } from "zod"; +import FirecrawlApp, { CrawlStatusResponse, ErrorResponse } from '@mendable/firecrawl-js'; const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"}); -// Scrape a website: -const scrapeResult = await app.scrapeUrl('firecrawl.dev'); +const main = async () => { -if (scrapeResult.data) { - console.log(scrapeResult.data.content) -} + // Scrape a website: + const scrapeResult = await app.scrapeUrl('firecrawl.dev'); -// Crawl a website: -const crawlResult = await app.crawlUrl('mendable.ai', {crawlerOptions: {excludes: ['blog/*'], limit: 5}}, false); -console.log(crawlResult) - -const jobId: string = await crawlResult['jobId']; -console.log(jobId); - -let job: JobStatusResponse; -while (true) { - job = await app.checkCrawlStatus(jobId); - if (job.status === 'completed') { - break; + if 
(scrapeResult.success) { + console.log(scrapeResult.markdown) } - await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second -} -if (job.data) { - console.log(job.data[0].content); -} + // Crawl a website: + const crawlResult = await app.crawlUrl('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + console.log(crawlResult); -// Search for a query: -const query = 'what is mendable?' -const searchResult = await app.search(query) + // Asynchronously crawl a website: + const asyncCrawlResult = await app.asyncCrawlUrl('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + + if (asyncCrawlResult.success) { + const id = asyncCrawlResult.id; + console.log(id); -// LLM Extraction: -// Define schema to extract contents into using zod schema -const zodSchema = z.object({ - top: z - .array( - z.object({ - title: z.string(), - points: z.number(), - by: z.string(), - commentsURL: z.string(), - }) - ) - .length(5) - .describe("Top 5 stories on Hacker News"), -}); + let checkStatus: CrawlStatusResponse | ErrorResponse; + if (asyncCrawlResult.success) { + while (true) { + checkStatus = await app.checkCrawlStatus(id); + if (checkStatus.success && checkStatus.status === 'completed') { + break; + } + await new Promise(resolve => setTimeout(resolve, 1000)); // wait 1 second + } -let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: zodSchema }, -}); - -if (llmExtractionResult.data) { - console.log(llmExtractionResult.data.llm_extraction); -} - -// Define schema to extract contents into using json schema -const jsonSchema = { - "type": "object", - "properties": { - "top": { - "type": "array", - "items": { - "type": "object", - "properties": { - "title": {"type": "string"}, - "points": {"type": "number"}, - "by": {"type": "string"}, - "commentsURL": {"type": "string"} - }, - "required": ["title", "points", "by", "commentsURL"] - }, - "minItems": 5, - "maxItems": 5, - "description": "Top 5 stories on Hacker News" + if (checkStatus.success && checkStatus.data) { + console.log(checkStatus.data[0].markdown); + } } - }, - "required": ["top"] -} - -llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", { - extractorOptions: { extractionSchema: jsonSchema }, -}); - -if (llmExtractionResult.data) { - console.log(llmExtractionResult.data.llm_extraction); + } + + // Map a website: + const mapResult = await app.mapUrl('https://firecrawl.dev'); + console.log(mapResult) + + // Crawl a website with WebSockets: + const watch = await app.crawlUrlAndWatch('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); + + watch.addEventListener("document", doc => { + console.log("DOC", doc.detail); + }); + + watch.addEventListener("error", err => { + console.error("ERR", err.detail.error); + }); + + watch.addEventListener("done", state => { + console.log("DONE", state.detail.status); + }); } +main() \ No newline at end of file diff --git a/apps/go-sdk/LICENSE b/apps/js-sdk/firecrawl/LICENSE similarity index 100% rename from apps/go-sdk/LICENSE rename to apps/js-sdk/firecrawl/LICENSE diff --git a/apps/js-sdk/firecrawl/README.md b/apps/js-sdk/firecrawl/README.md index d916bf70..0f3a6824 100644 --- a/apps/js-sdk/firecrawl/README.md +++ b/apps/js-sdk/firecrawl/README.md @@ -1,10 +1,10 @@ -# Firecrawl JavaScript SDK +# Firecrawl Node SDK -The Firecrawl JavaScript SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). 
It provides a simple and intuitive interface for interacting with the Firecrawl API. +The Firecrawl Node SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). It provides a simple and intuitive interface for interacting with the Firecrawl API. ## Installation -To install the Firecrawl JavaScript SDK, you can use npm: +To install the Firecrawl Node SDK, you can use npm: ```bash npm install @mendable/firecrawl-js @@ -15,44 +15,31 @@ npm install @mendable/firecrawl-js 1. Get an API key from [firecrawl.dev](https://firecrawl.dev) 2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` class. - Here's an example of how to use the SDK with error handling: ```js - import FirecrawlApp from '@mendable/firecrawl-js'; +import FirecrawlApp, { CrawlParams, CrawlStatusResponse } from '@mendable/firecrawl-js'; - async function main() { - try { - // Initialize the FirecrawlApp with your API key - const app = new FirecrawlApp({ apiKey: "YOUR_API_KEY" }); +const app = new FirecrawlApp({apiKey: "fc-YOUR_API_KEY"}); - // Scrape a single URL - const url = 'https://mendable.ai'; - const scrapedData = await app.scrapeUrl(url); - console.log(scrapedData); - - // Crawl a website - const crawlUrl = 'https://mendable.ai'; - const params = { - crawlerOptions: { - excludes: ['blog/'], - includes: [], // leave empty for all pages - limit: 1000, - }, - pageOptions: { - onlyMainContent: true - } - }; +// Scrape a website +const scrapeResponse = await app.scrapeUrl('https://firecrawl.dev', { + formats: ['markdown', 'html'], +}); - const crawlResult = await app.crawlUrl(crawlUrl, params); - console.log(crawlResult); +if (scrapeResponse) { + console.log(scrapeResponse) +} - } catch (error) { - console.error('An error occurred:', error.message); - } +// Crawl a website +const crawlResponse = await app.crawlUrl('https://firecrawl.dev', { + limit: 100, + scrapeOptions: { + formats: ['markdown', 'html'], } +}) - main(); +console.log(crawlResponse) ``` ### Scraping a URL @@ -60,31 +47,54 @@ Here's an example of how to use the SDK with error handling: To scrape a single URL with error handling, use the `scrapeUrl` method. It takes the URL as a parameter and returns the scraped data as a dictionary. ```js - async function scrapeExample() { - try { - const url = 'https://example.com'; - const scrapedData = await app.scrapeUrl(url); - console.log(scrapedData); +const url = "https://example.com"; +const scrapedData = await app.scrapeUrl(url); +``` - } catch (error) { - console.error( - 'Error occurred while scraping:', - error.message - ); - } +### Crawling a Website + +To crawl a website with error handling, use the `crawlUrl` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. + +```js +const crawlResponse = await app.crawlUrl('https://firecrawl.dev', { + limit: 100, + scrapeOptions: { + formats: ['markdown', 'html'], } - - scrapeExample(); +}) +``` + + +### Asynchronous Crawl + +To initiate an asynchronous crawl of a website, utilize the AsyncCrawlURL method. This method requires the starting URL and optional parameters as inputs. 
The params argument enables you to define various settings for the asynchronous crawl, such as the maximum number of pages to crawl, permitted domains, and the output format. Upon successful initiation, this method returns an ID, which is essential for subsequently checking the status of the crawl.
+
+```js
+const asyncCrawlResult = await app.asyncCrawlUrl('mendable.ai', { excludePaths: ['blog/*'], limit: 5});
+```
+
+### Checking Crawl Status
+
+To check the status of a crawl job with error handling, use the `checkCrawlStatus` method. It takes the job ID as a parameter and returns the current status of the crawl job.
+
+```js
+const status = await app.checkCrawlStatus(id);
```

### Extracting structured data from a URL

-With LLM extraction, you can easily extract structured data from any URL. We support zod schemas to make it easier for you too. Here is how you to use it:
+With LLM extraction, you can easily extract structured data from any URL. We support zod schemas to make it easier for you too. Here is how to use it:

```js
+import FirecrawlApp from "@mendable/firecrawl-js";
 import { z } from "zod";

-const zodSchema = z.object({
+const app = new FirecrawlApp({
+  apiKey: "fc-YOUR_API_KEY",
+});
+
+// Define schema to extract contents into
+const schema = z.object({
   top: z
     .array(
       z.object({
@@ -98,98 +108,53 @@ const zodSchema = z.object({
     .describe("Top 5 stories on Hacker News"),
 });

-let llmExtractionResult = await app.scrapeUrl("https://news.ycombinator.com", {
-  extractorOptions: { extractionSchema: zodSchema },
+const scrapeResult = await app.scrapeUrl("https://firecrawl.dev", {
+  extractorOptions: { extractionSchema: schema },
 });

-console.log(llmExtractionResult.data.llm_extraction);
+console.log(scrapeResult.data["llm_extraction"]);
 ```

-### Search for a query
+### Map a Website

-Used to search the web, get the most relevant results, scrap each page and return the markdown.
+Use `mapUrl` to generate a list of URLs from a website. The `params` argument lets you customize the mapping process, including options to exclude subdomains or to utilize the sitemap.

```js
-query = 'what is mendable?'
-searchResult = app.search(query)
+const mapResult = await app.mapUrl('https://example.com') as MapResponse;
+console.log(mapResult)
```

-### Crawling a Website
+### Crawl a website with WebSockets

-To crawl a website with error handling, use the `crawlUrl` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format.
+To crawl a website with WebSockets, use the `crawlUrlAndWatch` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format.
```js -async function crawlExample() { - try { - const crawlUrl = 'https://example.com'; - const params = { - crawlerOptions: { - excludes: ['blog/'], - includes: [], // leave empty for all pages - limit: 1000, - }, - pageOptions: { - onlyMainContent: true - } - }; - const waitUntilDone = true; - const timeout = 5; - const crawlResult = await app.crawlUrl( - crawlUrl, - params, - waitUntilDone, - timeout - ); +// Crawl a website with WebSockets: +const watch = await app.crawlUrlAndWatch('mendable.ai', { excludePaths: ['blog/*'], limit: 5}); - console.log(crawlResult); +watch.addEventListener("document", doc => { + console.log("DOC", doc.detail); +}); - } catch (error) { - console.error( - 'Error occurred while crawling:', - error.message - ); - } -} +watch.addEventListener("error", err => { + console.error("ERR", err.detail.error); +}); -crawlExample(); -``` - - -### Checking Crawl Status - -To check the status of a crawl job with error handling, use the `checkCrawlStatus` method. It takes the job ID as a parameter and returns the current status of the crawl job. - -```js -async function checkStatusExample(jobId) { - try { - const status = await app.checkCrawlStatus(jobId); - console.log(status); - - } catch (error) { - console.error( - 'Error occurred while checking crawl status:', - error.message - ); - } -} -// Example usage, assuming you have a jobId -checkStatusExample('your_job_id_here'); -``` - -## Running Locally -To use the SDK when running Firecrawl locally, you can change the initial Firecrawl app instance to: -```js -const app = new FirecrawlApp({ apiKey: "YOUR_API_KEY", apiUrl: "http://localhost:3002" }); +watch.addEventListener("done", state => { + console.log("DONE", state.detail.status); +}); ``` ## Error Handling The SDK handles errors returned by the Firecrawl API and raises appropriate exceptions. If an error occurs during a request, an exception will be raised with a descriptive error message. The examples above demonstrate how to handle these errors using `try/catch` blocks. -## Contributing - -Contributions to the Firecrawl JavaScript SDK are welcome! If you find any issues or have suggestions for improvements, please open an issue or submit a pull request on the GitHub repository. - ## License -The Firecrawl JavaScript SDK is open-source and released under the [MIT License](https://opensource.org/licenses/MIT). +The Firecrawl Node SDK is licensed under the MIT License. This means you are free to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the SDK, subject to the following conditions: + +- The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Please note that while this SDK is MIT licensed, it is part of a larger project which may be under different licensing terms. Always refer to the license information in the root directory of the main project for overall licensing details. 
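Note on the Error Handling section of the README hunk above: the rewritten docs still say errors can be caught with `try/catch`, but the inline try/catch samples were removed in this revision. Below is a minimal sketch of what that handling could look like against the v1 SDK shown in this diff; the URL, API key, and helper name are illustrative placeholders, not part of the change set.

```js
import FirecrawlApp from '@mendable/firecrawl-js';

const app = new FirecrawlApp({ apiKey: 'fc-YOUR_API_KEY' });

// Hypothetical helper: wraps a scrape in try/catch so API failures
// (for example a 401 from an invalid key) surface as readable log output.
async function safeScrape(url) {
  try {
    const result = await app.scrapeUrl(url, { formats: ['markdown'] });
    if (result.success) {
      console.log(result.markdown);
    } else {
      console.error('Scrape returned an error:', result.error);
    }
  } catch (error) {
    // The SDK throws Error objects with descriptive messages,
    // e.g. "Request failed with status code 401".
    console.error('Scrape failed:', error.message);
  }
}

safeScrape('https://firecrawl.dev');
```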
diff --git a/apps/js-sdk/firecrawl/build/cjs/index.js b/apps/js-sdk/firecrawl/build/cjs/index.js index da340cae..2908b09d 100644 --- a/apps/js-sdk/firecrawl/build/cjs/index.js +++ b/apps/js-sdk/firecrawl/build/cjs/index.js @@ -1,257 +1,264 @@ "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.CrawlWatcher = void 0; const axios_1 = __importDefault(require("axios")); -const zod_1 = require("zod"); const zod_to_json_schema_1 = require("zod-to-json-schema"); +const isows_1 = require("isows"); +const typescript_event_target_1 = require("typescript-event-target"); /** * Main class for interacting with the Firecrawl API. + * Provides methods for scraping, searching, crawling, and mapping web content. */ class FirecrawlApp { /** * Initializes a new instance of the FirecrawlApp class. - * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + * @param config - Configuration options for the FirecrawlApp instance. */ constructor({ apiKey = null, apiUrl = null }) { this.apiKey = apiKey || ""; this.apiUrl = apiUrl || "https://api.firecrawl.dev"; - if (!this.apiKey) { - throw new Error("No API key provided"); - } } /** * Scrapes a URL using the Firecrawl API. - * @param {string} url - The URL to scrape. - * @param {Params | null} params - Additional parameters for the scrape request. - * @returns {Promise} The response from the scrape operation. + * @param url - The URL to scrape. + * @param params - Additional parameters for the scrape request. + * @returns The response from the scrape operation. */ - scrapeUrl(url, params = null) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const headers = { - "Content-Type": "application/json", - Authorization: `Bearer ${this.apiKey}`, + async scrapeUrl(url, params) { + const headers = { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + }; + let jsonData = { url, ...params }; + if (jsonData?.extract?.schema) { + let schema = jsonData.extract.schema; + // Try parsing the schema as a Zod schema + try { + schema = (0, zod_to_json_schema_1.zodToJsonSchema)(schema); + } + catch (error) { + } + jsonData = { + ...jsonData, + extract: { + ...jsonData.extract, + schema: schema, + }, }; - let jsonData = Object.assign({ url }, params); - if ((_a = params === null || params === void 0 ? void 0 : params.extractorOptions) === null || _a === void 0 ? 
void 0 : _a.extractionSchema) { - let schema = params.extractorOptions.extractionSchema; - // Check if schema is an instance of ZodSchema to correctly identify Zod schemas - if (schema instanceof zod_1.z.ZodSchema) { - schema = (0, zod_to_json_schema_1.zodToJsonSchema)(schema); - } - jsonData = Object.assign(Object.assign({}, jsonData), { extractorOptions: Object.assign(Object.assign({}, params.extractorOptions), { extractionSchema: schema, mode: params.extractorOptions.mode || "llm-extraction" }) }); - } - try { - const response = yield axios_1.default.post(this.apiUrl + "/v0/scrape", jsonData, { headers }); - if (response.status === 200) { - const responseData = response.data; - if (responseData.success) { - return responseData; - } - else { - throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); - } - } - else { - this.handleError(response, "scrape URL"); - } - } - catch (error) { - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Searches for a query using the Firecrawl API. - * @param {string} query - The query to search for. - * @param {Params | null} params - Additional parameters for the search request. - * @returns {Promise} The response from the search operation. - */ - search(query, params = null) { - return __awaiter(this, void 0, void 0, function* () { - const headers = { - "Content-Type": "application/json", - Authorization: `Bearer ${this.apiKey}`, - }; - let jsonData = { query }; - if (params) { - jsonData = Object.assign(Object.assign({}, jsonData), params); - } - try { - const response = yield axios_1.default.post(this.apiUrl + "/v0/search", jsonData, { headers }); - if (response.status === 200) { - const responseData = response.data; - if (responseData.success) { - return responseData; - } - else { - throw new Error(`Failed to search. Error: ${responseData.error}`); - } - } - else { - this.handleError(response, "search"); - } - } - catch (error) { - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Initiates a crawl job for a URL using the Firecrawl API. - * @param {string} url - The URL to crawl. - * @param {Params | null} params - Additional parameters for the crawl request. - * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. - * @param {number} pollInterval - Time in seconds for job status checks. - * @param {string} idempotencyKey - Optional idempotency key for the request. - * @returns {Promise} The response from the crawl operation. - */ - crawlUrl(url, params = null, waitUntilDone = true, pollInterval = 2, idempotencyKey) { - return __awaiter(this, void 0, void 0, function* () { - const headers = this.prepareHeaders(idempotencyKey); - let jsonData = { url }; - if (params) { - jsonData = Object.assign(Object.assign({}, jsonData), params); - } - try { - const response = yield this.postRequest(this.apiUrl + "/v0/crawl", jsonData, headers); - if (response.status === 200) { - const jobId = response.data.jobId; - if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval); - } - else { - return { success: true, jobId }; - } - } - else { - this.handleError(response, "start crawl job"); - } - } - catch (error) { - console.log(error); - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Checks the status of a crawl job using the Firecrawl API. 
- * @param {string} jobId - The job ID of the crawl operation. - * @returns {Promise} The response containing the job status. - */ - checkCrawlStatus(jobId) { - return __awaiter(this, void 0, void 0, function* () { - const headers = this.prepareHeaders(); - try { - const response = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); - if (response.status === 200) { + } + try { + const response = await axios_1.default.post(this.apiUrl + `/v1/scrape`, jsonData, { headers }); + if (response.status === 200) { + const responseData = response.data; + if (responseData.success) { return { success: true, - status: response.data.status, - current: response.data.current, - current_url: response.data.current_url, - current_step: response.data.current_step, - total: response.data.total, - data: response.data.data, - partial_data: !response.data.data - ? response.data.partial_data - : undefined, + warning: responseData.warning, + error: responseData.error, + ...responseData.data }; } else { - this.handleError(response, "check crawl status"); + throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); } } - catch (error) { + else { + this.handleError(response, "scrape URL"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + } + /** + * This method is intended to search for a query using the Firecrawl API. However, it is not supported in version 1 of the API. + * @param query - The search query string. + * @param params - Additional parameters for the search. + * @returns Throws an error advising to use version 0 of the API. + */ + async search(query, params) { + throw new Error("Search is not supported in v1, please update FirecrawlApp() initialization to use v0."); + } + /** + * Initiates a crawl job for a URL using the Firecrawl API. + * @param url - The URL to crawl. + * @param params - Additional parameters for the crawl request. + * @param pollInterval - Time in seconds for job status checks. + * @param idempotencyKey - Optional idempotency key for the request. + * @returns The response from the crawl operation. + */ + async crawlUrl(url, params, pollInterval = 2, idempotencyKey) { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/crawl`, jsonData, headers); + if (response.status === 200) { + const id = response.data.id; + return this.monitorJobStatus(id, headers, pollInterval); + } + else { + this.handleError(response, "start crawl job"); + } + } + catch (error) { + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } + else { throw new Error(error.message); } - return { - success: false, - status: "unknown", - current: 0, - current_url: "", - current_step: "", - total: 0, - error: "Internal server error.", - }; - }); + } + return { success: false, error: "Internal server error." 
}; + } + async asyncCrawlUrl(url, params, idempotencyKey) { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/crawl`, jsonData, headers); + if (response.status === 200) { + return response.data; + } + else { + this.handleError(response, "start crawl job"); + } + } + catch (error) { + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } + else { + throw new Error(error.message); + } + } + return { success: false, error: "Internal server error." }; + } + /** + * Checks the status of a crawl job using the Firecrawl API. + * @param id - The ID of the crawl operation. + * @returns The response containing the job status. + */ + async checkCrawlStatus(id) { + if (!id) { + throw new Error("No crawl ID provided"); + } + const headers = this.prepareHeaders(); + try { + const response = await this.getRequest(`${this.apiUrl}/v1/crawl/${id}`, headers); + if (response.status === 200) { + return ({ + success: true, + status: response.data.status, + total: response.data.total, + completed: response.data.completed, + creditsUsed: response.data.creditsUsed, + expiresAt: new Date(response.data.expiresAt), + next: response.data.next, + data: response.data.data, + error: response.data.error + }); + } + else { + this.handleError(response, "check crawl status"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + } + async crawlUrlAndWatch(url, params, idempotencyKey) { + const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey); + if (crawl.success && crawl.id) { + const id = crawl.id; + return new CrawlWatcher(id, this); + } + throw new Error("Crawl job failed to start"); + } + async mapUrl(url, params) { + const headers = this.prepareHeaders(); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/map`, jsonData, headers); + if (response.status === 200) { + return response.data; + } + else { + this.handleError(response, "map"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; } /** * Prepares the headers for an API request. - * @returns {AxiosRequestHeaders} The prepared headers. + * @param idempotencyKey - Optional key to ensure idempotency. + * @returns The prepared headers. */ prepareHeaders(idempotencyKey) { - return Object.assign({ "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}` }, (idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {})); + return { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + ...(idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {}), + }; } /** * Sends a POST request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {Params} data - The data to send in the request. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the POST request. + * @param url - The URL to send the request to. + * @param data - The data to send in the request. + * @param headers - The headers for the request. + * @returns The response from the POST request. 
*/ postRequest(url, data, headers) { return axios_1.default.post(url, data, { headers }); } /** * Sends a GET request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the GET request. + * @param url - The URL to send the request to. + * @param headers - The headers for the request. + * @returns The response from the GET request. */ getRequest(url, headers) { return axios_1.default.get(url, { headers }); } /** * Monitors the status of a crawl job until completion or failure. - * @param {string} jobId - The job ID of the crawl operation. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @param {number} timeout - Timeout in seconds for job status checks. - * @returns {Promise} The final job status or data. + * @param id - The ID of the crawl operation. + * @param headers - The headers for the request. + * @param checkInterval - Interval in seconds for job status checks. + * @param checkUrl - Optional URL to check the status (used for v1 API) + * @returns The final job status or data. */ - monitorJobStatus(jobId, headers, checkInterval) { - return __awaiter(this, void 0, void 0, function* () { - while (true) { - const statusResponse = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); - if (statusResponse.status === 200) { - const statusData = statusResponse.data; - if (statusData.status === "completed") { - if ("data" in statusData) { - return statusData.data; - } - else { - throw new Error("Crawl job completed but no data was returned"); - } - } - else if (["active", "paused", "pending", "queued"].includes(statusData.status)) { - if (checkInterval < 2) { - checkInterval = 2; - } - yield new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); // Wait for the specified timeout before checking again + async monitorJobStatus(id, headers, checkInterval) { + while (true) { + const statusResponse = await this.getRequest(`${this.apiUrl}/v1/crawl/${id}`, headers); + if (statusResponse.status === 200) { + const statusData = statusResponse.data; + if (statusData.status === "completed") { + if ("data" in statusData) { + return statusData; } else { - throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`); + throw new Error("Crawl job completed but no data was returned"); } } + else if (["active", "paused", "pending", "queued", "scraping"].includes(statusData.status)) { + checkInterval = Math.max(checkInterval, 2); + await new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); + } else { - this.handleError(statusResponse, "check crawl status"); + throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`); } } - }); + else { + this.handleError(statusResponse, "check crawl status"); + } + } } /** * Handles errors from API responses. 
@@ -269,3 +276,72 @@ class FirecrawlApp { } } exports.default = FirecrawlApp; +class CrawlWatcher extends typescript_event_target_1.TypedEventTarget { + constructor(id, app) { + super(); + this.ws = new isows_1.WebSocket(`${app.apiUrl}/v1/crawl/${id}`, app.apiKey); + this.status = "scraping"; + this.data = []; + const messageHandler = (msg) => { + if (msg.type === "done") { + this.status = "completed"; + this.dispatchTypedEvent("done", new CustomEvent("done", { + detail: { + status: this.status, + data: this.data, + }, + })); + } + else if (msg.type === "error") { + this.status = "failed"; + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: msg.error, + }, + })); + } + else if (msg.type === "catchup") { + this.status = msg.data.status; + this.data.push(...(msg.data.data ?? [])); + for (const doc of this.data) { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: doc, + })); + } + } + else if (msg.type === "document") { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: msg.data, + })); + } + }; + this.ws.onmessage = ((ev) => { + if (typeof ev.data !== "string") { + this.ws.close(); + return; + } + const msg = JSON.parse(ev.data); + messageHandler(msg); + }).bind(this); + this.ws.onclose = ((ev) => { + const msg = JSON.parse(ev.reason); + messageHandler(msg); + }).bind(this); + this.ws.onerror = ((_) => { + this.status = "failed"; + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: "WebSocket error", + }, + })); + }).bind(this); + } + close() { + this.ws.close(); + } +} +exports.CrawlWatcher = CrawlWatcher; diff --git a/apps/js-sdk/firecrawl/build/esm/index.js b/apps/js-sdk/firecrawl/build/esm/index.js index 99de5e2b..4245cc37 100644 --- a/apps/js-sdk/firecrawl/build/esm/index.js +++ b/apps/js-sdk/firecrawl/build/esm/index.js @@ -1,252 +1,258 @@ -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; import axios from "axios"; -import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; +import { WebSocket } from "isows"; +import { TypedEventTarget } from "typescript-event-target"; /** * Main class for interacting with the Firecrawl API. + * Provides methods for scraping, searching, crawling, and mapping web content. */ export default class FirecrawlApp { /** * Initializes a new instance of the FirecrawlApp class. - * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + * @param config - Configuration options for the FirecrawlApp instance. */ constructor({ apiKey = null, apiUrl = null }) { this.apiKey = apiKey || ""; this.apiUrl = apiUrl || "https://api.firecrawl.dev"; - if (!this.apiKey) { - throw new Error("No API key provided"); - } } /** * Scrapes a URL using the Firecrawl API. - * @param {string} url - The URL to scrape. 
- * @param {Params | null} params - Additional parameters for the scrape request. - * @returns {Promise} The response from the scrape operation. + * @param url - The URL to scrape. + * @param params - Additional parameters for the scrape request. + * @returns The response from the scrape operation. */ - scrapeUrl(url_1) { - return __awaiter(this, arguments, void 0, function* (url, params = null) { - var _a; - const headers = { - "Content-Type": "application/json", - Authorization: `Bearer ${this.apiKey}`, + async scrapeUrl(url, params) { + const headers = { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + }; + let jsonData = { url, ...params }; + if (jsonData?.extract?.schema) { + let schema = jsonData.extract.schema; + // Try parsing the schema as a Zod schema + try { + schema = zodToJsonSchema(schema); + } + catch (error) { + } + jsonData = { + ...jsonData, + extract: { + ...jsonData.extract, + schema: schema, + }, }; - let jsonData = Object.assign({ url }, params); - if ((_a = params === null || params === void 0 ? void 0 : params.extractorOptions) === null || _a === void 0 ? void 0 : _a.extractionSchema) { - let schema = params.extractorOptions.extractionSchema; - // Check if schema is an instance of ZodSchema to correctly identify Zod schemas - if (schema instanceof z.ZodSchema) { - schema = zodToJsonSchema(schema); - } - jsonData = Object.assign(Object.assign({}, jsonData), { extractorOptions: Object.assign(Object.assign({}, params.extractorOptions), { extractionSchema: schema, mode: params.extractorOptions.mode || "llm-extraction" }) }); - } - try { - const response = yield axios.post(this.apiUrl + "/v0/scrape", jsonData, { headers }); - if (response.status === 200) { - const responseData = response.data; - if (responseData.success) { - return responseData; - } - else { - throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); - } - } - else { - this.handleError(response, "scrape URL"); - } - } - catch (error) { - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Searches for a query using the Firecrawl API. - * @param {string} query - The query to search for. - * @param {Params | null} params - Additional parameters for the search request. - * @returns {Promise} The response from the search operation. - */ - search(query_1) { - return __awaiter(this, arguments, void 0, function* (query, params = null) { - const headers = { - "Content-Type": "application/json", - Authorization: `Bearer ${this.apiKey}`, - }; - let jsonData = { query }; - if (params) { - jsonData = Object.assign(Object.assign({}, jsonData), params); - } - try { - const response = yield axios.post(this.apiUrl + "/v0/search", jsonData, { headers }); - if (response.status === 200) { - const responseData = response.data; - if (responseData.success) { - return responseData; - } - else { - throw new Error(`Failed to search. Error: ${responseData.error}`); - } - } - else { - this.handleError(response, "search"); - } - } - catch (error) { - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Initiates a crawl job for a URL using the Firecrawl API. - * @param {string} url - The URL to crawl. - * @param {Params | null} params - Additional parameters for the crawl request. - * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. - * @param {number} pollInterval - Time in seconds for job status checks. 
- * @param {string} idempotencyKey - Optional idempotency key for the request. - * @returns {Promise} The response from the crawl operation. - */ - crawlUrl(url_1) { - return __awaiter(this, arguments, void 0, function* (url, params = null, waitUntilDone = true, pollInterval = 2, idempotencyKey) { - const headers = this.prepareHeaders(idempotencyKey); - let jsonData = { url }; - if (params) { - jsonData = Object.assign(Object.assign({}, jsonData), params); - } - try { - const response = yield this.postRequest(this.apiUrl + "/v0/crawl", jsonData, headers); - if (response.status === 200) { - const jobId = response.data.jobId; - if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval); - } - else { - return { success: true, jobId }; - } - } - else { - this.handleError(response, "start crawl job"); - } - } - catch (error) { - console.log(error); - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; - }); - } - /** - * Checks the status of a crawl job using the Firecrawl API. - * @param {string} jobId - The job ID of the crawl operation. - * @returns {Promise} The response containing the job status. - */ - checkCrawlStatus(jobId) { - return __awaiter(this, void 0, void 0, function* () { - const headers = this.prepareHeaders(); - try { - const response = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); - if (response.status === 200) { + } + try { + const response = await axios.post(this.apiUrl + `/v1/scrape`, jsonData, { headers }); + if (response.status === 200) { + const responseData = response.data; + if (responseData.success) { return { success: true, - status: response.data.status, - current: response.data.current, - current_url: response.data.current_url, - current_step: response.data.current_step, - total: response.data.total, - data: response.data.data, - partial_data: !response.data.data - ? response.data.partial_data - : undefined, + warning: responseData.warning, + error: responseData.error, + ...responseData.data }; } else { - this.handleError(response, "check crawl status"); + throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); } } - catch (error) { + else { + this.handleError(response, "scrape URL"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + } + /** + * This method is intended to search for a query using the Firecrawl API. However, it is not supported in version 1 of the API. + * @param query - The search query string. + * @param params - Additional parameters for the search. + * @returns Throws an error advising to use version 0 of the API. + */ + async search(query, params) { + throw new Error("Search is not supported in v1, please update FirecrawlApp() initialization to use v0."); + } + /** + * Initiates a crawl job for a URL using the Firecrawl API. + * @param url - The URL to crawl. + * @param params - Additional parameters for the crawl request. + * @param pollInterval - Time in seconds for job status checks. + * @param idempotencyKey - Optional idempotency key for the request. + * @returns The response from the crawl operation. 
+ */ + async crawlUrl(url, params, pollInterval = 2, idempotencyKey) { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/crawl`, jsonData, headers); + if (response.status === 200) { + const id = response.data.id; + return this.monitorJobStatus(id, headers, pollInterval); + } + else { + this.handleError(response, "start crawl job"); + } + } + catch (error) { + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } + else { throw new Error(error.message); } - return { - success: false, - status: "unknown", - current: 0, - current_url: "", - current_step: "", - total: 0, - error: "Internal server error.", - }; - }); + } + return { success: false, error: "Internal server error." }; + } + async asyncCrawlUrl(url, params, idempotencyKey) { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/crawl`, jsonData, headers); + if (response.status === 200) { + return response.data; + } + else { + this.handleError(response, "start crawl job"); + } + } + catch (error) { + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } + else { + throw new Error(error.message); + } + } + return { success: false, error: "Internal server error." }; + } + /** + * Checks the status of a crawl job using the Firecrawl API. + * @param id - The ID of the crawl operation. + * @returns The response containing the job status. + */ + async checkCrawlStatus(id) { + if (!id) { + throw new Error("No crawl ID provided"); + } + const headers = this.prepareHeaders(); + try { + const response = await this.getRequest(`${this.apiUrl}/v1/crawl/${id}`, headers); + if (response.status === 200) { + return ({ + success: true, + status: response.data.status, + total: response.data.total, + completed: response.data.completed, + creditsUsed: response.data.creditsUsed, + expiresAt: new Date(response.data.expiresAt), + next: response.data.next, + data: response.data.data, + error: response.data.error + }); + } + else { + this.handleError(response, "check crawl status"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; + } + async crawlUrlAndWatch(url, params, idempotencyKey) { + const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey); + if (crawl.success && crawl.id) { + const id = crawl.id; + return new CrawlWatcher(id, this); + } + throw new Error("Crawl job failed to start"); + } + async mapUrl(url, params) { + const headers = this.prepareHeaders(); + let jsonData = { url, ...params }; + try { + const response = await this.postRequest(this.apiUrl + `/v1/map`, jsonData, headers); + if (response.status === 200) { + return response.data; + } + else { + this.handleError(response, "map"); + } + } + catch (error) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; } /** * Prepares the headers for an API request. - * @returns {AxiosRequestHeaders} The prepared headers. 
+ * @param idempotencyKey - Optional key to ensure idempotency. + * @returns The prepared headers. */ prepareHeaders(idempotencyKey) { - return Object.assign({ "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}` }, (idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {})); + return { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + ...(idempotencyKey ? { "x-idempotency-key": idempotencyKey } : {}), + }; } /** * Sends a POST request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {Params} data - The data to send in the request. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the POST request. + * @param url - The URL to send the request to. + * @param data - The data to send in the request. + * @param headers - The headers for the request. + * @returns The response from the POST request. */ postRequest(url, data, headers) { return axios.post(url, data, { headers }); } /** * Sends a GET request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the GET request. + * @param url - The URL to send the request to. + * @param headers - The headers for the request. + * @returns The response from the GET request. */ getRequest(url, headers) { return axios.get(url, { headers }); } /** * Monitors the status of a crawl job until completion or failure. - * @param {string} jobId - The job ID of the crawl operation. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @param {number} timeout - Timeout in seconds for job status checks. - * @returns {Promise} The final job status or data. + * @param id - The ID of the crawl operation. + * @param headers - The headers for the request. + * @param checkInterval - Interval in seconds for job status checks. + * @param checkUrl - Optional URL to check the status (used for v1 API) + * @returns The final job status or data. */ - monitorJobStatus(jobId, headers, checkInterval) { - return __awaiter(this, void 0, void 0, function* () { - while (true) { - const statusResponse = yield this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers); - if (statusResponse.status === 200) { - const statusData = statusResponse.data; - if (statusData.status === "completed") { - if ("data" in statusData) { - return statusData.data; - } - else { - throw new Error("Crawl job completed but no data was returned"); - } - } - else if (["active", "paused", "pending", "queued"].includes(statusData.status)) { - if (checkInterval < 2) { - checkInterval = 2; - } - yield new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); // Wait for the specified timeout before checking again + async monitorJobStatus(id, headers, checkInterval) { + while (true) { + const statusResponse = await this.getRequest(`${this.apiUrl}/v1/crawl/${id}`, headers); + if (statusResponse.status === 200) { + const statusData = statusResponse.data; + if (statusData.status === "completed") { + if ("data" in statusData) { + return statusData; } else { - throw new Error(`Crawl job failed or was stopped. 
Status: ${statusData.status}`); + throw new Error("Crawl job completed but no data was returned"); } } + else if (["active", "paused", "pending", "queued", "scraping"].includes(statusData.status)) { + checkInterval = Math.max(checkInterval, 2); + await new Promise((resolve) => setTimeout(resolve, checkInterval * 1000)); + } else { - this.handleError(statusResponse, "check crawl status"); + throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`); } } - }); + else { + this.handleError(statusResponse, "check crawl status"); + } + } } /** * Handles errors from API responses. @@ -263,3 +269,71 @@ export default class FirecrawlApp { } } } +export class CrawlWatcher extends TypedEventTarget { + constructor(id, app) { + super(); + this.ws = new WebSocket(`${app.apiUrl}/v1/crawl/${id}`, app.apiKey); + this.status = "scraping"; + this.data = []; + const messageHandler = (msg) => { + if (msg.type === "done") { + this.status = "completed"; + this.dispatchTypedEvent("done", new CustomEvent("done", { + detail: { + status: this.status, + data: this.data, + }, + })); + } + else if (msg.type === "error") { + this.status = "failed"; + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: msg.error, + }, + })); + } + else if (msg.type === "catchup") { + this.status = msg.data.status; + this.data.push(...(msg.data.data ?? [])); + for (const doc of this.data) { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: doc, + })); + } + } + else if (msg.type === "document") { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: msg.data, + })); + } + }; + this.ws.onmessage = ((ev) => { + if (typeof ev.data !== "string") { + this.ws.close(); + return; + } + const msg = JSON.parse(ev.data); + messageHandler(msg); + }).bind(this); + this.ws.onclose = ((ev) => { + const msg = JSON.parse(ev.reason); + messageHandler(msg); + }).bind(this); + this.ws.onerror = ((_) => { + this.status = "failed"; + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: "WebSocket error", + }, + })); + }).bind(this); + } + close() { + this.ws.close(); + } +} diff --git a/apps/js-sdk/firecrawl/package-lock.json b/apps/js-sdk/firecrawl/package-lock.json index c42d6ca7..ce6a1a4a 100644 --- a/apps/js-sdk/firecrawl/package-lock.json +++ b/apps/js-sdk/firecrawl/package-lock.json @@ -1,16 +1,18 @@ { "name": "@mendable/firecrawl-js", - "version": "0.0.34", + "version": "1.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@mendable/firecrawl-js", - "version": "0.0.34", + "version": "1.1.0", "license": "MIT", "dependencies": { "axios": "^1.6.8", "dotenv": "^16.4.5", + "isows": "^1.0.4", + "typescript-event-target": "^1.1.1", "uuid": "^9.0.1", "zod": "^3.23.8", "zod-to-json-schema": "^3.23.0" @@ -2137,6 +2139,20 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, + "node_modules/isows": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/isows/-/isows-1.0.4.tgz", + "integrity": "sha512-hEzjY+x9u9hPmBom9IIAqdJCwNLax+xrPb51vEPpERoFlIxgmZcHzsT5jKG06nvInKOBGvReAVz80Umed5CczQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/wagmi-dev" + } + ], + "peerDependencies": { + "ws": "*" + } + }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", "resolved": 
"https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", @@ -3733,6 +3749,11 @@ "node": ">=14.17" } }, + "node_modules/typescript-event-target": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/typescript-event-target/-/typescript-event-target-1.1.1.tgz", + "integrity": "sha512-dFSOFBKV6uwaloBCCUhxlD3Pr/P1a/tJdcmPrTXCHlEFD3faj0mztjcGn6VBAhQ0/Bdy8K3VWrrqwbt/ffsYsg==" + }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", @@ -3855,6 +3876,27 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "peer": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/apps/js-sdk/firecrawl/package.json b/apps/js-sdk/firecrawl/package.json index 380d972b..e68b3014 100644 --- a/apps/js-sdk/firecrawl/package.json +++ b/apps/js-sdk/firecrawl/package.json @@ -1,6 +1,6 @@ { "name": "@mendable/firecrawl-js", - "version": "0.0.35", + "version": "1.2.1", "description": "JavaScript SDK for Firecrawl API", "main": "build/cjs/index.js", "types": "types/index.d.ts", @@ -19,7 +19,7 @@ "build": "tsc --module commonjs --moduleResolution node10 --outDir build/cjs/ && echo '{\"type\": \"commonjs\"}' > build/cjs/package.json && npx tsc --module NodeNext --moduleResolution NodeNext --outDir build/esm/ && echo '{\"type\": \"module\"}' > build/esm/package.json", "build-and-publish": "npm run build && npm publish --access public", "publish-beta": "npm run build && npm publish --access public --tag beta", - "test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/**/*.test.ts" + "test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose src/__tests__/v1/**/*.test.ts" }, "repository": { "type": "git", @@ -30,6 +30,8 @@ "dependencies": { "axios": "^1.6.8", "dotenv": "^16.4.5", + "isows": "^1.0.4", + "typescript-event-target": "^1.1.1", "uuid": "^9.0.1", "zod": "^3.23.8", "zod-to-json-schema": "^3.23.0" diff --git a/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts b/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts index ad917de4..7d107afe 100644 --- a/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/js-sdk/firecrawl/src/__tests__/e2e_withAuth/index.test.ts @@ -1,160 +1,330 @@ -import FirecrawlApp from '../../index'; -import { v4 as uuidv4 } from 'uuid'; -import dotenv from 'dotenv'; -import { describe, test, expect } from '@jest/globals'; +import FirecrawlApp, { + CrawlResponseV0, + CrawlStatusResponse, + CrawlStatusResponseV0, + FirecrawlDocumentV0, + ScrapeResponseV0, + SearchResponseV0, +} from "../../index"; +import { v4 as uuidv4 } from "uuid"; +import dotenv from "dotenv"; +import { describe, test, expect } from "@jest/globals"; dotenv.config(); const TEST_API_KEY = process.env.TEST_API_KEY; const API_URL = "http://127.0.0.1:3002"; -describe('FirecrawlApp E2E Tests', () => { - test.concurrent('should throw error for no API key', async () => { +describe('FirecrawlApp<"v0"> E2E Tests', () => 
{ + test.concurrent("should throw error for no API key", async () => { expect(() => { - new FirecrawlApp({ apiKey: null, apiUrl: API_URL }); + new FirecrawlApp<"v0">({ apiKey: null, apiUrl: API_URL, version: "v0" }); }).toThrow("No API key provided"); }); - test.concurrent('should throw error for invalid API key on scrape', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); - await expect(invalidApp.scrapeUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); - }); - - test.concurrent('should throw error for blocklisted URL on scrape', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const blocklistedUrl = "https://facebook.com/fake-test"; - await expect(app.scrapeUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); - }); - - test.concurrent('should return successful response with valid preview token', async () => { - const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai'); - expect(response).not.toBeNull(); - expect(response.data?.content).toContain("_Roast_"); - }, 30000); // 30 seconds timeout - - test.concurrent('should return successful response for valid scrape', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai'); - expect(response).not.toBeNull(); - expect(response.data?.content).toContain("_Roast_"); - expect(response.data).toHaveProperty('markdown'); - expect(response.data).toHaveProperty('metadata'); - expect(response.data).not.toHaveProperty('html'); - }, 30000); // 30 seconds timeout - - test.concurrent('should return successful response with valid API key and include HTML', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://roastmywebsite.ai', { pageOptions: { includeHtml: true } }); - expect(response).not.toBeNull(); - expect(response.data?.content).toContain("_Roast_"); - expect(response.data?.markdown).toContain("_Roast_"); - expect(response.data?.html).toContain(" { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf'); - expect(response).not.toBeNull(); - expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - }, 30000); // 30 seconds timeout - - test.concurrent('should return successful response for valid scrape with PDF file without explicit extension', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001'); - expect(response).not.toBeNull(); - expect(response.data?.content).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); - }, 30000); // 30 seconds timeout - - test.concurrent('should throw error for invalid API key on crawl', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); - await expect(invalidApp.crawlUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); - }); - - test.concurrent('should throw error for blocklisted URL on crawl', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - 
const blocklistedUrl = "https://twitter.com/fake-test"; - await expect(app.crawlUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); - }); - - test.concurrent('should return successful response for crawl and wait for completion', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, true, 30); - expect(response).not.toBeNull(); - expect(response[0].content).toContain("_Roast_"); - }, 60000); // 60 seconds timeout - - test.concurrent('should handle idempotency key for crawl', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const uniqueIdempotencyKey = uuidv4(); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false, 2, uniqueIdempotencyKey); - expect(response).not.toBeNull(); - expect(response.jobId).toBeDefined(); - - await expect(app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, true, 2, uniqueIdempotencyKey)).rejects.toThrow("Request failed with status code 409"); - }); - - test.concurrent('should check crawl status', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.crawlUrl('https://roastmywebsite.ai', { crawlerOptions: { excludes: ['blog/*'] } }, false); - expect(response).not.toBeNull(); - expect(response.jobId).toBeDefined(); - - let statusResponse = await app.checkCrawlStatus(response.jobId); - const maxChecks = 15; - let checks = 0; - - while (statusResponse.status === 'active' && checks < maxChecks) { - await new Promise(resolve => setTimeout(resolve, 1000)); - expect(statusResponse.partial_data).not.toBeNull(); - expect(statusResponse.current).toBeGreaterThanOrEqual(1); - statusResponse = await app.checkCrawlStatus(response.jobId); - checks++; + test.concurrent( + "should throw error for invalid API key on scrape", + async () => { + const invalidApp = new FirecrawlApp<"v0">({ + apiKey: "invalid_api_key", + apiUrl: API_URL, + version: "v0", + }); + await expect( + invalidApp.scrapeUrl("https://roastmywebsite.ai") + ).rejects.toThrow("Request failed with status code 401"); } + ); - expect(statusResponse).not.toBeNull(); - expect(statusResponse.success).toBe(true); - expect(statusResponse.status).toBe('completed'); - expect(statusResponse.total).toEqual(statusResponse.current); - expect(statusResponse.current_step).not.toBeNull(); - expect(statusResponse?.data?.length).toBeGreaterThan(0); - }, 35000); // 35 seconds timeout + test.concurrent( + "should throw error for blocklisted URL on scrape", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const blocklistedUrl = "https://facebook.com/fake-test"; + await expect(app.scrapeUrl(blocklistedUrl)).rejects.toThrow( + "Request failed with status code 403" + ); + } + ); - test.concurrent('should return successful response for search', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.search("test query"); + test.concurrent( + "should return successful response with valid preview token", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: "this_is_just_a_preview_token", + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl( + "https://roastmywebsite.ai" + )) as ScrapeResponseV0; + 
expect(response).not.toBeNull(); + expect(response.data?.content).toContain("_Roast_"); + }, + 30000 + ); // 30 seconds timeout + + test.concurrent( + "should return successful response for valid scrape", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl( + "https://roastmywebsite.ai" + )) as ScrapeResponseV0; + expect(response).not.toBeNull(); + expect(response.data?.content).toContain("_Roast_"); + expect(response.data).toHaveProperty("markdown"); + expect(response.data).toHaveProperty("metadata"); + expect(response.data).not.toHaveProperty("html"); + }, + 30000 + ); // 30 seconds timeout + + test.concurrent( + "should return successful response with valid API key and include HTML", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl("https://roastmywebsite.ai", { + pageOptions: { includeHtml: true }, + })) as ScrapeResponseV0; + expect(response).not.toBeNull(); + expect(response.data?.content).toContain("_Roast_"); + expect(response.data?.markdown).toContain("_Roast_"); + expect(response.data?.html).toContain(" { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl( + "https://arxiv.org/pdf/astro-ph/9301001.pdf" + )) as ScrapeResponseV0; + expect(response).not.toBeNull(); + expect(response.data?.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + }, + 30000 + ); // 30 seconds timeout + + test.concurrent( + "should return successful response for valid scrape with PDF file without explicit extension", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl( + "https://arxiv.org/pdf/astro-ph/9301001" + )) as ScrapeResponseV0; + expect(response).not.toBeNull(); + expect(response.data?.content).toContain( + "We present spectrophotometric observations of the Broad Line Radio Galaxy" + ); + }, + 30000 + ); // 30 seconds timeout + + test.concurrent( + "should throw error for invalid API key on crawl", + async () => { + const invalidApp = new FirecrawlApp<"v0">({ + apiKey: "invalid_api_key", + apiUrl: API_URL, + version: "v0", + }); + await expect( + invalidApp.crawlUrl("https://roastmywebsite.ai") + ).rejects.toThrow("Request failed with status code 401"); + } + ); + + test.concurrent( + "should throw error for blocklisted URL on crawl", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const blocklistedUrl = "https://twitter.com/fake-test"; + await expect(app.crawlUrl(blocklistedUrl)).rejects.toThrow( + "Request failed with status code 403" + ); + } + ); + + test.concurrent( + "should return successful response for crawl and wait for completion", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.crawlUrl( + "https://roastmywebsite.ai", + { crawlerOptions: { excludes: ["blog/*"] } }, + true, + 10 + )) as FirecrawlDocumentV0[]; + expect(response).not.toBeNull(); + expect(response[0].content).toContain("_Roast_"); + }, + 60000 + ); // 60 seconds timeout + + test.concurrent("should handle idempotency key for crawl", async () => { + const app = new 
FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const uniqueIdempotencyKey = uuidv4(); + const response = (await app.crawlUrl( + "https://roastmywebsite.ai", + { crawlerOptions: { excludes: ["blog/*"] } }, + false, + 2, + uniqueIdempotencyKey + )) as CrawlResponseV0; expect(response).not.toBeNull(); - expect(response?.data?.[0]?.content).toBeDefined(); - expect(response?.data?.length).toBeGreaterThan(2); - }, 30000); // 30 seconds timeout + expect(response.jobId).toBeDefined(); - test.concurrent('should throw error for invalid API key on search', async () => { - const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); - await expect(invalidApp.search("test query")).rejects.toThrow("Request failed with status code 401"); + await expect( + app.crawlUrl( + "https://roastmywebsite.ai", + { crawlerOptions: { excludes: ["blog/*"] } }, + true, + 2, + uniqueIdempotencyKey + ) + ).rejects.toThrow("Request failed with status code 409"); }); - test.concurrent('should perform LLM extraction', async () => { - const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); - const response = await app.scrapeUrl("https://mendable.ai", { - extractorOptions: { - mode: 'llm-extraction', - extractionPrompt: "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", - extractionSchema: { - type: 'object', - properties: { - company_mission: { type: 'string' }, - supports_sso: { type: 'boolean' }, - is_open_source: { type: 'boolean' } - }, - required: ['company_mission', 'supports_sso', 'is_open_source'] - } + test.concurrent( + "should check crawl status", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response: any = (await app.crawlUrl( + "https://roastmywebsite.ai", + { crawlerOptions: { excludes: ["blog/*"] } }, + false + )) as CrawlResponseV0; + expect(response).not.toBeNull(); + expect(response.jobId).toBeDefined(); + + let statusResponse = await app.checkCrawlStatus(response.jobId); + const maxChecks = 15; + let checks = 0; + + while (statusResponse.status === "active" && checks < maxChecks) { + await new Promise((resolve) => setTimeout(resolve, 5000)); + expect(statusResponse.partial_data).not.toBeNull(); + // expect(statusResponse.current).toBeGreaterThanOrEqual(1); + statusResponse = (await app.checkCrawlStatus( + response.jobId + )) as CrawlStatusResponseV0; + checks++; } - }); - expect(response).not.toBeNull(); - expect(response.data?.llm_extraction).toBeDefined(); - const llmExtraction = response.data?.llm_extraction; - expect(llmExtraction?.company_mission).toBeDefined(); - expect(typeof llmExtraction?.supports_sso).toBe('boolean'); - expect(typeof llmExtraction?.is_open_source).toBe('boolean'); - }, 30000); // 30 seconds timeout + + expect(statusResponse).not.toBeNull(); + expect(statusResponse.success).toBe(true); + expect(statusResponse.status).toBe("completed"); + expect(statusResponse.total).toEqual(statusResponse.current); + expect(statusResponse.current_step).not.toBeNull(); + expect(statusResponse.current).toBeGreaterThanOrEqual(1); + + expect(statusResponse?.data?.length).toBeGreaterThan(0); + }, + 35000 + ); // 35 seconds timeout + + test.concurrent( + "should return successful response for search", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await 
app.search("test query")) as SearchResponseV0; + expect(response).not.toBeNull(); + expect(response?.data?.[0]?.content).toBeDefined(); + expect(response?.data?.length).toBeGreaterThan(2); + }, + 30000 + ); // 30 seconds timeout + + test.concurrent( + "should throw error for invalid API key on search", + async () => { + const invalidApp = new FirecrawlApp<"v0">({ + apiKey: "invalid_api_key", + apiUrl: API_URL, + version: "v0", + }); + await expect(invalidApp.search("test query")).rejects.toThrow( + "Request failed with status code 401" + ); + } + ); + + test.concurrent( + "should perform LLM extraction", + async () => { + const app = new FirecrawlApp<"v0">({ + apiKey: TEST_API_KEY, + apiUrl: API_URL, + version: "v0", + }); + const response = (await app.scrapeUrl("https://mendable.ai", { + extractorOptions: { + mode: "llm-extraction", + extractionPrompt: + "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", + extractionSchema: { + type: "object", + properties: { + company_mission: { type: "string" }, + supports_sso: { type: "boolean" }, + is_open_source: { type: "boolean" }, + }, + required: ["company_mission", "supports_sso", "is_open_source"], + }, + }, + })) as ScrapeResponseV0; + expect(response).not.toBeNull(); + expect(response.data?.llm_extraction).toBeDefined(); + const llmExtraction = response.data?.llm_extraction; + expect(llmExtraction?.company_mission).toBeDefined(); + expect(typeof llmExtraction?.supports_sso).toBe("boolean"); + expect(typeof llmExtraction?.is_open_source).toBe("boolean"); + }, + 30000 + ); // 30 seconds timeout }); diff --git a/apps/js-sdk/firecrawl/src/__tests__/index.test.ts b/apps/js-sdk/firecrawl/src/__tests__/index.test.ts index dcda96f7..92951237 100644 --- a/apps/js-sdk/firecrawl/src/__tests__/index.test.ts +++ b/apps/js-sdk/firecrawl/src/__tests__/index.test.ts @@ -31,7 +31,7 @@ describe('the firecrawl JS SDK', () => { }); const apiKey = 'YOUR_API_KEY' - const app = new FirecrawlApp({ apiKey }); + const app = new FirecrawlApp<"v0">({ apiKey }); // Scrape a single URL const url = 'https://mendable.ai'; const scrapedData = await app.scrapeUrl(url); diff --git a/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts b/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts new file mode 100644 index 00000000..9f6c6462 --- /dev/null +++ b/apps/js-sdk/firecrawl/src/__tests__/v1/e2e_withAuth/index.test.ts @@ -0,0 +1,312 @@ +import FirecrawlApp, { CrawlParams, CrawlResponse, CrawlStatusResponse, MapResponse, ScrapeParams, ScrapeResponse } from '../../../index'; +import { v4 as uuidv4 } from 'uuid'; +import dotenv from 'dotenv'; +import { describe, test, expect } from '@jest/globals'; + +dotenv.config(); + +const TEST_API_KEY = process.env.TEST_API_KEY; +const API_URL = "http://127.0.0.1:3002"; + +describe('FirecrawlApp E2E Tests', () => { + test.concurrent('should throw error for no API key', async () => { + expect(() => { + new FirecrawlApp({ apiKey: null, apiUrl: API_URL }); + }).toThrow("No API key provided"); + }); + + test.concurrent('should throw error for invalid API key on scrape', async () => { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.scrapeUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on scrape', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, 
apiUrl: API_URL }); + const blocklistedUrl = "https://facebook.com/fake-test"; + await expect(app.scrapeUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); + }); + + test.concurrent('should return successful response with valid preview token', async () => { + const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response?.markdown).toContain("_Roast_"); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response for valid scrape', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://roastmywebsite.ai') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response).not.toHaveProperty('content'); // v0 + expect(response).not.toHaveProperty('html'); + expect(response).not.toHaveProperty('rawHtml'); + expect(response).not.toHaveProperty('screenshot'); + expect(response).not.toHaveProperty('links'); + + expect(response).toHaveProperty('markdown'); + expect(response).toHaveProperty('metadata'); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response with valid API key and options', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl( + 'https://roastmywebsite.ai', { + formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + headers: { "x-key": "test" }, + includeTags: ['h1'], + excludeTags: ['h2'], + onlyMainContent: true, + timeout: 30000, + waitFor: 1000 + }) as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response).not.toHaveProperty('content'); // v0 + expect(response.markdown).toContain("_Roast_"); + expect(response.html).toContain(" { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001.pdf') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response?.markdown).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); + }, 30000); // 30 seconds timeout + + test.concurrent('should return successful response for valid scrape with PDF file without explicit extension', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.scrapeUrl('https://arxiv.org/pdf/astro-ph/9301001') as ScrapeResponse; + expect(response).not.toBeNull(); + expect(response?.markdown).toContain('We present spectrophotometric observations of the Broad Line Radio Galaxy'); + }, 30000); // 30 seconds timeout + + test.concurrent('should throw error for invalid API key on crawl', async () => { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.crawlUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on crawl', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const blocklistedUrl = "https://twitter.com/fake-test"; + await expect(app.crawlUrl(blocklistedUrl)).rejects.toThrow("URL is blocked. 
Firecrawl currently does not support social media scraping due to policy restrictions."); + }); + + test.concurrent('should return successful response for crawl and wait for completion', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://roastmywebsite.ai', {}, true, 30) as CrawlStatusResponse; + expect(response).not.toBeNull(); + expect(response).toHaveProperty("total"); + expect(response.total).toBeGreaterThan(0); + expect(response).toHaveProperty("creditsUsed"); + expect(response.creditsUsed).toBeGreaterThan(0); + expect(response).toHaveProperty("expiresAt"); + expect(new Date(response.expiresAt).getTime()).toBeGreaterThan(Date.now()); + expect(response).toHaveProperty("status"); + expect(response.status).toBe("completed"); + expect(response).not.toHaveProperty("next"); // wait until done + expect(response.data?.length).toBeGreaterThan(0); + expect(response.data?.[0]).toHaveProperty("markdown"); + expect(response.data?.[0].markdown).toContain("_Roast_"); + expect(response.data?.[0]).not.toHaveProperty('content'); // v0 + expect(response.data?.[0]).not.toHaveProperty("html"); + expect(response.data?.[0]).not.toHaveProperty("rawHtml"); + expect(response.data?.[0]).not.toHaveProperty("screenshot"); + expect(response.data?.[0]).not.toHaveProperty("links"); + expect(response.data?.[0]).toHaveProperty("metadata"); + expect(response.data?.[0].metadata).toHaveProperty("title"); + expect(response.data?.[0].metadata).toHaveProperty("description"); + expect(response.data?.[0].metadata).toHaveProperty("language"); + expect(response.data?.[0].metadata).toHaveProperty("sourceURL"); + expect(response.data?.[0].metadata).toHaveProperty("statusCode"); + expect(response.data?.[0].metadata).not.toHaveProperty("error"); + }, 60000); // 60 seconds timeout + + test.concurrent('should return successful response for crawl with options and wait for completion', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://roastmywebsite.ai', { + excludePaths: ['blog/*'], + includePaths: ['/'], + maxDepth: 2, + ignoreSitemap: true, + limit: 10, + allowBackwardLinks: true, + allowExternalLinks: true, + scrapeOptions: { + formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + headers: { "x-key": "test" }, + includeTags: ['h1'], + excludeTags: ['h2'], + onlyMainContent: true, + waitFor: 1000 + } + } as CrawlParams, true, 30) as CrawlStatusResponse; + expect(response).not.toBeNull(); + expect(response).toHaveProperty("total"); + expect(response.total).toBeGreaterThan(0); + expect(response).toHaveProperty("creditsUsed"); + expect(response.creditsUsed).toBeGreaterThan(0); + expect(response).toHaveProperty("expiresAt"); + expect(new Date(response.expiresAt).getTime()).toBeGreaterThan(Date.now()); + expect(response).toHaveProperty("status"); + expect(response.status).toBe("completed"); + expect(response).not.toHaveProperty("next"); + expect(response.data?.length).toBeGreaterThan(0); + expect(response.data?.[0]).toHaveProperty("markdown"); + expect(response.data?.[0].markdown).toContain("_Roast_"); + expect(response.data?.[0]).not.toHaveProperty('content'); // v0 + expect(response.data?.[0]).toHaveProperty("html"); + expect(response.data?.[0].html).toContain(" { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const uniqueIdempotencyKey = uuidv4(); + const response = await app.crawlUrl('https://roastmywebsite.ai', {}, false, 
2, uniqueIdempotencyKey) as CrawlResponse; + expect(response).not.toBeNull(); + expect(response.id).toBeDefined(); + + await expect(app.crawlUrl('https://roastmywebsite.ai', {}, true, 2, uniqueIdempotencyKey)).rejects.toThrow("Request failed with status code 409"); + }); + + test.concurrent('should check crawl status', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.crawlUrl('https://firecrawl.dev', { scrapeOptions: { formats: ['markdown', 'html', 'rawHtml', 'screenshot', 'links']}} as CrawlParams, false) as CrawlResponse; + expect(response).not.toBeNull(); + expect(response.id).toBeDefined(); + + let statusResponse = await app.checkCrawlStatus(response.id); + const maxChecks = 15; + let checks = 0; + + while (statusResponse.status === 'scraping' && checks < maxChecks) { + await new Promise(resolve => setTimeout(resolve, 5000)); + expect(statusResponse).not.toHaveProperty("partial_data"); // v0 + expect(statusResponse).not.toHaveProperty("current"); // v0 + expect(statusResponse).toHaveProperty("data"); + expect(statusResponse).toHaveProperty("total"); + expect(statusResponse).toHaveProperty("creditsUsed"); + expect(statusResponse).toHaveProperty("expiresAt"); + expect(statusResponse).toHaveProperty("status"); + expect(statusResponse).toHaveProperty("next"); + expect(statusResponse.total).toBeGreaterThan(0); + expect(statusResponse.creditsUsed).toBeGreaterThan(0); + expect(statusResponse.expiresAt.getTime()).toBeGreaterThan(Date.now()); + expect(statusResponse.status).toBe("scraping"); + expect(statusResponse.next).toContain("/v1/crawl/"); + statusResponse = await app.checkCrawlStatus(response.id) as CrawlStatusResponse; + checks++; + } + + expect(statusResponse).not.toBeNull(); + expect(statusResponse).toHaveProperty("total"); + expect(statusResponse.total).toBeGreaterThan(0); + expect(statusResponse).toHaveProperty("creditsUsed"); + expect(statusResponse.creditsUsed).toBeGreaterThan(0); + expect(statusResponse).toHaveProperty("expiresAt"); + expect(statusResponse.expiresAt.getTime()).toBeGreaterThan(Date.now()); + expect(statusResponse).toHaveProperty("status"); + expect(statusResponse.status).toBe("completed"); + expect(statusResponse.data?.length).toBeGreaterThan(0); + expect(statusResponse.data?.[0]).toHaveProperty("markdown"); + expect(statusResponse.data?.[0].markdown?.length).toBeGreaterThan(10); + expect(statusResponse.data?.[0]).not.toHaveProperty('content'); // v0 + expect(statusResponse.data?.[0]).toHaveProperty("html"); + expect(statusResponse.data?.[0].html).toContain(" { + const invalidApp = new FirecrawlApp({ apiKey: "invalid_api_key", apiUrl: API_URL }); + await expect(invalidApp.mapUrl('https://roastmywebsite.ai')).rejects.toThrow("Request failed with status code 401"); + }); + + test.concurrent('should throw error for blocklisted URL on map', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const blocklistedUrl = "https://facebook.com/fake-test"; + await expect(app.mapUrl(blocklistedUrl)).rejects.toThrow("Request failed with status code 403"); + }); + + test.concurrent('should return successful response with valid preview token', async () => { + const app = new FirecrawlApp({ apiKey: "this_is_just_a_preview_token", apiUrl: API_URL }); + const response = await app.mapUrl('https://roastmywebsite.ai') as MapResponse; + expect(response).not.toBeNull(); + expect(response.links?.length).toBeGreaterThan(0); + }, 30000); // 30 seconds timeout + + 
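The assertions in this new v1 suite encode the headline response-shape change from v0: document fields now sit directly on the scrape response instead of under `data`, `content` is dropped in favour of `markdown`/`html`, crawl jobs are identified by `id` rather than `jobId`, and metadata uses `statusCode`/`error` instead of `pageStatusCode`/`pageError`. A compact sketch of that difference from the caller's side; field names come from the interfaces and tests in this diff, while the import path and URLs are illustrative only.

```typescript
import FirecrawlApp from "@mendable/firecrawl-js"; // import path assumed

async function v1Shapes(apiKey: string) {
  const app = new FirecrawlApp({ apiKey });

  // v1 scrape: the document is the response itself (v0 nested it under
  // `data` and exposed the text as `data.content`).
  const scrape = await app.scrapeUrl("https://example.com");
  if (scrape.success) {
    console.log(scrape.markdown);             // v0: scrape.data?.content / .markdown
    console.log(scrape.metadata?.statusCode); // v0: metadata.pageStatusCode
  }

  // v1 crawl: start asynchronously and poll by `id` (v0 returned `jobId`
  // and polled /v0/crawl/status/:jobId).
  const started = await app.asyncCrawlUrl("https://example.com", { limit: 5 });
  if (started.success && started.id) {
    const status = await app.checkCrawlStatus(started.id);
    if (status.success) {
      console.log(status.status, status.completed, "of", status.total);
    }
  }
}
```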
test.concurrent('should return successful response for valid map', async () => { + const app = new FirecrawlApp({ apiKey: TEST_API_KEY, apiUrl: API_URL }); + const response = await app.mapUrl('https://roastmywebsite.ai') as MapResponse; + expect(response).not.toBeNull(); + + expect(response.links?.length).toBeGreaterThan(0); + expect(response.links?.[0]).toContain("https://"); + const filteredLinks = response.links?.filter((link: string) => link.includes("roastmywebsite.ai")); + expect(filteredLinks?.length).toBeGreaterThan(0); + }, 30000); // 30 seconds timeout + + test('should throw NotImplementedError for search on v1', async () => { + const app = new FirecrawlApp({ apiUrl: API_URL, apiKey: TEST_API_KEY }); + await expect(app.search("test query")).rejects.toThrow("Search is not supported in v1"); + }); +}); diff --git a/apps/js-sdk/firecrawl/src/index.ts b/apps/js-sdk/firecrawl/src/index.ts index a42d4618..1d1bb4ee 100644 --- a/apps/js-sdk/firecrawl/src/index.ts +++ b/apps/js-sdk/firecrawl/src/index.ts @@ -1,8 +1,13 @@ import axios, { AxiosResponse, AxiosRequestHeaders } from "axios"; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; +import { WebSocket } from "isows"; +import { TypedEventTarget } from "typescript-event-target"; + /** * Configuration interface for FirecrawlApp. + * @param apiKey - Optional API key for authentication. + * @param apiUrl - Optional base URL of the API; defaults to 'https://api.firecrawl.dev'. */ export interface FirecrawlAppConfig { apiKey?: string | null; @@ -11,6 +16,7 @@ export interface FirecrawlAppConfig { /** * Metadata for a Firecrawl document. + * Includes various optional properties for document metadata. */ export interface FirecrawlDocumentMetadata { title?: string; @@ -43,142 +49,192 @@ export interface FirecrawlDocumentMetadata { articleTag?: string; articleSection?: string; sourceURL?: string; - pageStatusCode?: number; - pageError?: string; - [key: string]: any; + statusCode?: number; + error?: string; + [key: string]: any; // Allows for additional metadata properties not explicitly defined. } /** * Document interface for Firecrawl. + * Represents a document retrieved or processed by Firecrawl. */ export interface FirecrawlDocument { - id?: string; url?: string; - content: string; markdown?: string; html?: string; - llm_extraction?: Record; - createdAt?: Date; - updatedAt?: Date; - type?: string; - metadata: FirecrawlDocumentMetadata; - childrenLinks?: string[]; - provider?: string; - warning?: string; + rawHtml?: string; + links?: string[]; + extract?: Record; + screenshot?: string; + metadata?: FirecrawlDocumentMetadata; +} - index?: number; +/** + * Parameters for scraping operations. + * Defines the options and configurations available for scraping web content. + */ +export interface ScrapeParams { + formats: ("markdown" | "html" | "rawHtml" | "content" | "links" | "screenshot" | "extract" | "full@scrennshot")[]; + headers?: Record; + includeTags?: string[]; + excludeTags?: string[]; + onlyMainContent?: boolean; + extract?: { + prompt?: string; + schema?: z.ZodSchema | any; + systemPrompt?: string; + }; + waitFor?: number; + timeout?: number; } /** * Response interface for scraping operations. + * Defines the structure of the response received after a scraping operation. 
*/ -export interface ScrapeResponse { - success: boolean; - data?: FirecrawlDocument; +export interface ScrapeResponse extends FirecrawlDocument { + success: true; + warning?: string; error?: string; } + /** - * Response interface for searching operations. + * Parameters for crawling operations. + * Includes options for both scraping and mapping during a crawl. */ -export interface SearchResponse { - success: boolean; - data?: FirecrawlDocument[]; - error?: string; +export interface CrawlParams { + includePaths?: string[]; + excludePaths?: string[]; + maxDepth?: number; + limit?: number; + allowBackwardLinks?: boolean; + allowExternalLinks?: boolean; + ignoreSitemap?: boolean; + scrapeOptions?: ScrapeParams; + webhook?: string; } + /** * Response interface for crawling operations. + * Defines the structure of the response received after initiating a crawl. */ export interface CrawlResponse { - success: boolean; - jobId?: string; - data?: FirecrawlDocument[]; + id?: string; + url?: string; + success: true; error?: string; } + /** * Response interface for job status checks. + * Provides detailed status of a crawl job including progress and results. */ -export interface JobStatusResponse { - success: boolean; - status: string; - current?: number; - current_url?: string; - current_step?: string; - total?: number; - jobId?: string; +export interface CrawlStatusResponse { + success: true; + total: number; + completed: number; + creditsUsed: number; + expiresAt: Date; + status: "scraping" | "completed" | "failed"; + next: string; data?: FirecrawlDocument[]; - partial_data?: FirecrawlDocument[]; error?: string; } + /** - * Generic parameter interface. + * Parameters for mapping operations. + * Defines options for mapping URLs during a crawl. */ -export interface Params { - [key: string]: any; - extractorOptions?: { - extractionSchema: z.ZodSchema | any; - mode?: "llm-extraction"; - extractionPrompt?: string; - }; +export interface MapParams { + search?: string; + ignoreSitemap?: boolean; + includeSubdomains?: boolean; + limit?: number; } + +/** + * Response interface for mapping operations. + * Defines the structure of the response received after a mapping operation. + */ +export interface MapResponse { + success: true; + links?: string[]; + error?: string; +} + +/** + * Error response interface. + * Defines the structure of the response received when an error occurs. + */ +export interface ErrorResponse { + success: false; + error: string; +} + /** * Main class for interacting with the Firecrawl API. + * Provides methods for scraping, searching, crawling, and mapping web content. */ export default class FirecrawlApp { - private apiKey: string; - private apiUrl: string; + public apiKey: string; + public apiUrl: string; /** * Initializes a new instance of the FirecrawlApp class. - * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + * @param config - Configuration options for the FirecrawlApp instance. */ constructor({ apiKey = null, apiUrl = null }: FirecrawlAppConfig) { this.apiKey = apiKey || ""; this.apiUrl = apiUrl || "https://api.firecrawl.dev"; - if (!this.apiKey) { - throw new Error("No API key provided"); - } } /** * Scrapes a URL using the Firecrawl API. - * @param {string} url - The URL to scrape. - * @param {Params | null} params - Additional parameters for the scrape request. - * @returns {Promise} The response from the scrape operation. + * @param url - The URL to scrape. + * @param params - Additional parameters for the scrape request. 
+ * @returns The response from the scrape operation. */ async scrapeUrl( url: string, - params: Params | null = null - ): Promise { + params?: ScrapeParams + ): Promise { const headers: AxiosRequestHeaders = { "Content-Type": "application/json", Authorization: `Bearer ${this.apiKey}`, } as AxiosRequestHeaders; - let jsonData: Params = { url, ...params }; - if (params?.extractorOptions?.extractionSchema) { - let schema = params.extractorOptions.extractionSchema; - // Check if schema is an instance of ZodSchema to correctly identify Zod schemas - if (schema instanceof z.ZodSchema) { + let jsonData: any = { url, ...params }; + if (jsonData?.extract?.schema) { + let schema = jsonData.extract.schema; + + // Try parsing the schema as a Zod schema + try { schema = zodToJsonSchema(schema); + } catch (error) { + } jsonData = { ...jsonData, - extractorOptions: { - ...params.extractorOptions, - extractionSchema: schema, - mode: params.extractorOptions.mode || "llm-extraction", + extract: { + ...jsonData.extract, + schema: schema, }, }; } try { const response: AxiosResponse = await axios.post( - this.apiUrl + "/v0/scrape", + this.apiUrl + `/v1/scrape`, jsonData, { headers } ); if (response.status === 200) { const responseData = response.data; if (responseData.success) { - return responseData; + return { + success: true, + warning: responseData.warning, + error: responseData.error, + ...responseData.data + }; } else { throw new Error(`Failed to scrape URL. Error: ${responseData.error}`); } @@ -192,134 +248,161 @@ export default class FirecrawlApp { } /** - * Searches for a query using the Firecrawl API. - * @param {string} query - The query to search for. - * @param {Params | null} params - Additional parameters for the search request. - * @returns {Promise} The response from the search operation. + * This method is intended to search for a query using the Firecrawl API. However, it is not supported in version 1 of the API. + * @param query - The search query string. + * @param params - Additional parameters for the search. + * @returns Throws an error advising to use version 0 of the API. */ async search( query: string, - params: Params | null = null - ): Promise { - const headers: AxiosRequestHeaders = { - "Content-Type": "application/json", - Authorization: `Bearer ${this.apiKey}`, - } as AxiosRequestHeaders; - let jsonData: Params = { query }; - if (params) { - jsonData = { ...jsonData, ...params }; - } - try { - const response: AxiosResponse = await axios.post( - this.apiUrl + "/v0/search", - jsonData, - { headers } - ); - if (response.status === 200) { - const responseData = response.data; - if (responseData.success) { - return responseData; - } else { - throw new Error(`Failed to search. Error: ${responseData.error}`); - } - } else { - this.handleError(response, "search"); - } - } catch (error: any) { - throw new Error(error.message); - } - return { success: false, error: "Internal server error." }; + params?: any + ): Promise { + throw new Error("Search is not supported in v1, please update FirecrawlApp() initialization to use v0."); } /** * Initiates a crawl job for a URL using the Firecrawl API. - * @param {string} url - The URL to crawl. - * @param {Params | null} params - Additional parameters for the crawl request. - * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. - * @param {number} pollInterval - Time in seconds for job status checks. - * @param {string} idempotencyKey - Optional idempotency key for the request. 
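The rewritten `scrapeUrl` above now reads `params.extract.schema` and converts a Zod schema to JSON Schema via `zodToJsonSchema` before posting to `/v1/scrape`, replacing the v0 `extractorOptions` path. A minimal usage sketch under that assumption; the schema, prompt, and URL are illustrative and the import path is assumed.

```typescript
import FirecrawlApp from "@mendable/firecrawl-js"; // import path assumed
import { z } from "zod";

async function extractExample(apiKey: string) {
  const app = new FirecrawlApp({ apiKey });

  // A Zod schema passed in `extract.schema` is converted to JSON Schema by
  // scrapeUrl before the request is sent to /v1/scrape.
  const result = await app.scrapeUrl("https://example.com", {
    formats: ["markdown", "extract"],
    extract: {
      schema: z.object({
        title: z.string(),
        summary: z.string(),
      }),
      prompt: "Extract the page title and a one-sentence summary.",
    },
  });

  if (result.success) {
    console.log(result.extract); // structured data keyed by the schema fields
  }
}
```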
- * @returns {Promise} The response from the crawl operation. + * @param url - The URL to crawl. + * @param params - Additional parameters for the crawl request. + * @param pollInterval - Time in seconds for job status checks. + * @param idempotencyKey - Optional idempotency key for the request. + * @returns The response from the crawl operation. */ async crawlUrl( url: string, - params: Params | null = null, - waitUntilDone: boolean = true, + params?: CrawlParams, pollInterval: number = 2, idempotencyKey?: string - ): Promise { + ): Promise { const headers = this.prepareHeaders(idempotencyKey); - let jsonData: Params = { url }; - if (params) { - jsonData = { ...jsonData, ...params }; - } + let jsonData: any = { url, ...params }; try { const response: AxiosResponse = await this.postRequest( - this.apiUrl + "/v0/crawl", + this.apiUrl + `/v1/crawl`, jsonData, headers ); if (response.status === 200) { - const jobId: string = response.data.jobId; - if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval); - } else { - return { success: true, jobId }; - } + const id: string = response.data.id; + return this.monitorJobStatus(id, headers, pollInterval); } else { this.handleError(response, "start crawl job"); } } catch (error: any) { - console.log(error); - throw new Error(error.message); + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } else { + throw new Error(error.message); + } + } + return { success: false, error: "Internal server error." }; + } + + async asyncCrawlUrl( + url: string, + params?: CrawlParams, + idempotencyKey?: string + ): Promise { + const headers = this.prepareHeaders(idempotencyKey); + let jsonData: any = { url, ...params }; + try { + const response: AxiosResponse = await this.postRequest( + this.apiUrl + `/v1/crawl`, + jsonData, + headers + ); + if (response.status === 200) { + return response.data; + } else { + this.handleError(response, "start crawl job"); + } + } catch (error: any) { + if (error.response?.data?.error) { + throw new Error(`Request failed with status code ${error.response.status}. Error: ${error.response.data.error} ${error.response.data.details ? ` - ${JSON.stringify(error.response.data.details)}` : ''}`); + } else { + throw new Error(error.message); + } } return { success: false, error: "Internal server error." }; } /** * Checks the status of a crawl job using the Firecrawl API. - * @param {string} jobId - The job ID of the crawl operation. - * @returns {Promise} The response containing the job status. + * @param id - The ID of the crawl operation. + * @returns The response containing the job status. 
*/ - async checkCrawlStatus(jobId: string): Promise { + async checkCrawlStatus(id?: string): Promise { + if (!id) { + throw new Error("No crawl ID provided"); + } + const headers: AxiosRequestHeaders = this.prepareHeaders(); try { const response: AxiosResponse = await this.getRequest( - this.apiUrl + `/v0/crawl/status/${jobId}`, + `${this.apiUrl}/v1/crawl/${id}`, headers ); if (response.status === 200) { - return { + return ({ success: true, status: response.data.status, - current: response.data.current, - current_url: response.data.current_url, - current_step: response.data.current_step, total: response.data.total, + completed: response.data.completed, + creditsUsed: response.data.creditsUsed, + expiresAt: new Date(response.data.expiresAt), + next: response.data.next, data: response.data.data, - partial_data: !response.data.data - ? response.data.partial_data - : undefined, - }; + error: response.data.error + }) } else { this.handleError(response, "check crawl status"); } } catch (error: any) { throw new Error(error.message); } - return { - success: false, - status: "unknown", - current: 0, - current_url: "", - current_step: "", - total: 0, - error: "Internal server error.", - }; + return { success: false, error: "Internal server error." }; + } + + async crawlUrlAndWatch( + url: string, + params?: CrawlParams, + idempotencyKey?: string, + ) { + const crawl = await this.asyncCrawlUrl(url, params, idempotencyKey); + + if (crawl.success && crawl.id) { + const id = crawl.id; + return new CrawlWatcher(id, this); + } + + throw new Error("Crawl job failed to start"); + } + + async mapUrl(url: string, params?: MapParams): Promise { + const headers = this.prepareHeaders(); + let jsonData: { url: string } & MapParams = { url, ...params }; + + try { + const response: AxiosResponse = await this.postRequest( + this.apiUrl + `/v1/map`, + jsonData, + headers + ); + if (response.status === 200) { + return response.data as MapResponse; + } else { + this.handleError(response, "map"); + } + } catch (error: any) { + throw new Error(error.message); + } + return { success: false, error: "Internal server error." }; } /** * Prepares the headers for an API request. - * @returns {AxiosRequestHeaders} The prepared headers. + * @param idempotencyKey - Optional key to ensure idempotency. + * @returns The prepared headers. */ prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders { return { @@ -331,14 +414,14 @@ export default class FirecrawlApp { /** * Sends a POST request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {Params} data - The data to send in the request. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the POST request. + * @param url - The URL to send the request to. + * @param data - The data to send in the request. + * @param headers - The headers for the request. + * @returns The response from the POST request. */ postRequest( url: string, - data: Params, + data: any, headers: AxiosRequestHeaders ): Promise { return axios.post(url, data, { headers }); @@ -346,9 +429,9 @@ export default class FirecrawlApp { /** * Sends a GET request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the GET request. + * @param url - The URL to send the request to. + * @param headers - The headers for the request. + * @returns The response from the GET request. 
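`mapUrl` is added in this revision with no v0 counterpart. A short sketch of calling it against the `MapParams` interface defined above; the option values and URL are illustrative and the import path is assumed.

```typescript
import FirecrawlApp from "@mendable/firecrawl-js"; // import path assumed

async function mapExample(apiKey: string) {
  const app = new FirecrawlApp({ apiKey });

  // mapUrl POSTs { url, ...params } to /v1/map and returns discovered links.
  const result = await app.mapUrl("https://example.com", {
    search: "docs",          // fields from the MapParams interface above
    includeSubdomains: true,
    limit: 100,
  });

  if (result.success) {
    console.log(result.links?.length, "links found");
  }
}
```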
*/ getRequest( url: string, @@ -359,38 +442,37 @@ export default class FirecrawlApp { /** * Monitors the status of a crawl job until completion or failure. - * @param {string} jobId - The job ID of the crawl operation. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @param {number} timeout - Timeout in seconds for job status checks. - * @returns {Promise} The final job status or data. + * @param id - The ID of the crawl operation. + * @param headers - The headers for the request. + * @param checkInterval - Interval in seconds for job status checks. + * @param checkUrl - Optional URL to check the status (used for v1 API) + * @returns The final job status or data. */ async monitorJobStatus( - jobId: string, + id: string, headers: AxiosRequestHeaders, checkInterval: number - ): Promise { + ): Promise { while (true) { const statusResponse: AxiosResponse = await this.getRequest( - this.apiUrl + `/v0/crawl/status/${jobId}`, + `${this.apiUrl}/v1/crawl/${id}`, headers ); if (statusResponse.status === 200) { const statusData = statusResponse.data; if (statusData.status === "completed") { if ("data" in statusData) { - return statusData.data; + return statusData; } else { throw new Error("Crawl job completed but no data was returned"); } } else if ( - ["active", "paused", "pending", "queued"].includes(statusData.status) + ["active", "paused", "pending", "queued", "scraping"].includes(statusData.status) ) { - if (checkInterval < 2) { - checkInterval = 2; - } + checkInterval = Math.max(checkInterval, 2); await new Promise((resolve) => setTimeout(resolve, checkInterval * 1000) - ); // Wait for the specified timeout before checking again + ); } else { throw new Error( `Crawl job failed or was stopped. Status: ${statusData.status}` @@ -421,3 +503,111 @@ export default class FirecrawlApp { } } } + +interface CrawlWatcherEvents { + document: CustomEvent, + done: CustomEvent<{ + status: CrawlStatusResponse["status"]; + data: FirecrawlDocument[]; + }>, + error: CustomEvent<{ + status: CrawlStatusResponse["status"], + data: FirecrawlDocument[], + error: string, + }>, +} + +export class CrawlWatcher extends TypedEventTarget { + private ws: WebSocket; + public data: FirecrawlDocument[]; + public status: CrawlStatusResponse["status"]; + + constructor(id: string, app: FirecrawlApp) { + super(); + this.ws = new WebSocket(`${app.apiUrl}/v1/crawl/${id}`, app.apiKey); + this.status = "scraping"; + this.data = []; + + type ErrorMessage = { + type: "error", + error: string, + } + + type CatchupMessage = { + type: "catchup", + data: CrawlStatusResponse, + } + + type DocumentMessage = { + type: "document", + data: FirecrawlDocument, + } + + type DoneMessage = { type: "done" } + + type Message = ErrorMessage | CatchupMessage | DoneMessage | DocumentMessage; + + const messageHandler = (msg: Message) => { + if (msg.type === "done") { + this.status = "completed"; + this.dispatchTypedEvent("done", new CustomEvent("done", { + detail: { + status: this.status, + data: this.data, + }, + })); + } else if (msg.type === "error") { + this.status = "failed"; + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: msg.error, + }, + })); + } else if (msg.type === "catchup") { + this.status = msg.data.status; + this.data.push(...(msg.data.data ?? 
[])); + for (const doc of this.data) { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: doc, + })); + } + } else if (msg.type === "document") { + this.dispatchTypedEvent("document", new CustomEvent("document", { + detail: msg.data, + })); + } + } + + this.ws.onmessage = ((ev: MessageEvent) => { + if (typeof ev.data !== "string") { + this.ws.close(); + return; + } + + const msg = JSON.parse(ev.data) as Message; + messageHandler(msg); + }).bind(this); + + this.ws.onclose = ((ev: CloseEvent) => { + const msg = JSON.parse(ev.reason) as Message; + messageHandler(msg); + }).bind(this); + + this.ws.onerror = ((_: Event) => { + this.status = "failed" + this.dispatchTypedEvent("error", new CustomEvent("error", { + detail: { + status: this.status, + data: this.data, + error: "WebSocket error", + }, + })); + }).bind(this); + } + + close() { + this.ws.close(); + } +} diff --git a/apps/js-sdk/firecrawl/tsconfig.json b/apps/js-sdk/firecrawl/tsconfig.json index d7764a46..56f13ced 100644 --- a/apps/js-sdk/firecrawl/tsconfig.json +++ b/apps/js-sdk/firecrawl/tsconfig.json @@ -11,7 +11,7 @@ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ /* Language and Environment */ - "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + "target": "es2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ // "jsx": "preserve", /* Specify what JSX code is generated. */ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ @@ -25,9 +25,9 @@ // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ /* Modules */ - "module": "NodeNext", /* Specify what module code is generated. */ + "module": "commonjs", /* Specify what module code is generated. */ "rootDir": "./src", /* Specify the root folder within your source files. */ - "moduleResolution": "nodenext", /* Specify how TypeScript looks up a file from a given module specifier. */ + "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ diff --git a/apps/js-sdk/firecrawl/types/index.d.ts b/apps/js-sdk/firecrawl/types/index.d.ts index bd6cfc20..36356c4e 100644 --- a/apps/js-sdk/firecrawl/types/index.d.ts +++ b/apps/js-sdk/firecrawl/types/index.d.ts @@ -1,7 +1,10 @@ import { AxiosResponse, AxiosRequestHeaders } from "axios"; import { z } from "zod"; +import { TypedEventTarget } from "typescript-event-target"; /** * Configuration interface for FirecrawlApp. + * @param apiKey - Optional API key for authentication. + * @param apiUrl - Optional base URL of the API; defaults to 'https://api.firecrawl.dev'. */ export interface FirecrawlAppConfig { apiKey?: string | null; @@ -9,6 +12,7 @@ export interface FirecrawlAppConfig { } /** * Metadata for a Firecrawl document. + * Includes various optional properties for document metadata. 
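The `CrawlWatcher` class added above exposes typed `document`, `done`, and `error` events over the `/v1/crawl` WebSocket. A sketch of consuming it through `crawlUrlAndWatch`; the event payload shapes come from the `CrawlWatcherEvents` interface in this diff, while the import path, URL, and handlers are illustrative.

```typescript
import FirecrawlApp from "@mendable/firecrawl-js"; // import path assumed

async function watchExample(apiKey: string) {
  const app = new FirecrawlApp({ apiKey });

  // crawlUrlAndWatch starts an async crawl and returns a CrawlWatcher bound
  // to the crawl's WebSocket stream.
  const watcher = await app.crawlUrlAndWatch("https://example.com", { limit: 5 });

  watcher.addEventListener("document", (event) => {
    console.log("document:", event.detail.metadata?.sourceURL);
  });

  watcher.addEventListener("done", (event) => {
    console.log("done:", event.detail.status, event.detail.data.length, "documents");
  });

  watcher.addEventListener("error", (event) => {
    console.error("crawl failed:", event.detail.error);
    watcher.close();
  });
}
```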
*/ export interface FirecrawlDocumentMetadata { title?: string; @@ -41,149 +45,192 @@ export interface FirecrawlDocumentMetadata { articleTag?: string; articleSection?: string; sourceURL?: string; - pageStatusCode?: number; - pageError?: string; + statusCode?: number; + error?: string; [key: string]: any; } /** * Document interface for Firecrawl. + * Represents a document retrieved or processed by Firecrawl. */ export interface FirecrawlDocument { - id?: string; url?: string; - content: string; markdown?: string; html?: string; - llm_extraction?: Record; - createdAt?: Date; - updatedAt?: Date; - type?: string; - metadata: FirecrawlDocumentMetadata; - childrenLinks?: string[]; - provider?: string; - warning?: string; - index?: number; + rawHtml?: string; + links?: string[]; + extract?: Record; + screenshot?: string; + metadata?: FirecrawlDocumentMetadata; +} +/** + * Parameters for scraping operations. + * Defines the options and configurations available for scraping web content. + */ +export interface ScrapeParams { + formats: ("markdown" | "html" | "rawHtml" | "content" | "links" | "screenshot" | "extract" | "full@scrennshot")[]; + headers?: Record; + includeTags?: string[]; + excludeTags?: string[]; + onlyMainContent?: boolean; + extract?: { + prompt?: string; + schema?: z.ZodSchema | any; + systemPrompt?: string; + }; + waitFor?: number; + timeout?: number; } /** * Response interface for scraping operations. + * Defines the structure of the response received after a scraping operation. */ -export interface ScrapeResponse { - success: boolean; - data?: FirecrawlDocument; +export interface ScrapeResponse extends FirecrawlDocument { + success: true; + warning?: string; error?: string; } /** - * Response interface for searching operations. + * Parameters for crawling operations. + * Includes options for both scraping and mapping during a crawl. */ -export interface SearchResponse { - success: boolean; - data?: FirecrawlDocument[]; - error?: string; +export interface CrawlParams { + includePaths?: string[]; + excludePaths?: string[]; + maxDepth?: number; + limit?: number; + allowBackwardLinks?: boolean; + allowExternalLinks?: boolean; + ignoreSitemap?: boolean; + scrapeOptions?: ScrapeParams; + webhook?: string; } /** * Response interface for crawling operations. + * Defines the structure of the response received after initiating a crawl. */ export interface CrawlResponse { - success: boolean; - jobId?: string; - data?: FirecrawlDocument[]; + id?: string; + url?: string; + success: true; error?: string; } /** * Response interface for job status checks. + * Provides detailed status of a crawl job including progress and results. */ -export interface JobStatusResponse { - success: boolean; - status: string; - current?: number; - current_url?: string; - current_step?: string; - total?: number; - jobId?: string; +export interface CrawlStatusResponse { + success: true; + total: number; + completed: number; + creditsUsed: number; + expiresAt: Date; + status: "scraping" | "completed" | "failed"; + next: string; data?: FirecrawlDocument[]; - partial_data?: FirecrawlDocument[]; error?: string; } /** - * Generic parameter interface. + * Parameters for mapping operations. + * Defines options for mapping URLs during a crawl. 
*/ -export interface Params { - [key: string]: any; - extractorOptions?: { - extractionSchema: z.ZodSchema | any; - mode?: "llm-extraction"; - extractionPrompt?: string; - }; +export interface MapParams { + search?: string; + ignoreSitemap?: boolean; + includeSubdomains?: boolean; + limit?: number; +} +/** + * Response interface for mapping operations. + * Defines the structure of the response received after a mapping operation. + */ +export interface MapResponse { + success: true; + links?: string[]; + error?: string; +} +/** + * Error response interface. + * Defines the structure of the response received when an error occurs. + */ +export interface ErrorResponse { + success: false; + error: string; } /** * Main class for interacting with the Firecrawl API. + * Provides methods for scraping, searching, crawling, and mapping web content. */ export default class FirecrawlApp { - private apiKey; - private apiUrl; + apiKey: string; + apiUrl: string; /** * Initializes a new instance of the FirecrawlApp class. - * @param {FirecrawlAppConfig} config - Configuration options for the FirecrawlApp instance. + * @param config - Configuration options for the FirecrawlApp instance. */ constructor({ apiKey, apiUrl }: FirecrawlAppConfig); /** * Scrapes a URL using the Firecrawl API. - * @param {string} url - The URL to scrape. - * @param {Params | null} params - Additional parameters for the scrape request. - * @returns {Promise} The response from the scrape operation. + * @param url - The URL to scrape. + * @param params - Additional parameters for the scrape request. + * @returns The response from the scrape operation. */ - scrapeUrl(url: string, params?: Params | null): Promise; + scrapeUrl(url: string, params?: ScrapeParams): Promise; /** - * Searches for a query using the Firecrawl API. - * @param {string} query - The query to search for. - * @param {Params | null} params - Additional parameters for the search request. - * @returns {Promise} The response from the search operation. + * This method is intended to search for a query using the Firecrawl API. However, it is not supported in version 1 of the API. + * @param query - The search query string. + * @param params - Additional parameters for the search. + * @returns Throws an error advising to use version 0 of the API. */ - search(query: string, params?: Params | null): Promise; + search(query: string, params?: any): Promise; /** * Initiates a crawl job for a URL using the Firecrawl API. - * @param {string} url - The URL to crawl. - * @param {Params | null} params - Additional parameters for the crawl request. - * @param {boolean} waitUntilDone - Whether to wait for the crawl job to complete. - * @param {number} pollInterval - Time in seconds for job status checks. - * @param {string} idempotencyKey - Optional idempotency key for the request. - * @returns {Promise} The response from the crawl operation. + * @param url - The URL to crawl. + * @param params - Additional parameters for the crawl request. + * @param pollInterval - Time in seconds for job status checks. + * @param idempotencyKey - Optional idempotency key for the request. + * @returns The response from the crawl operation. 
*/ - crawlUrl(url: string, params?: Params | null, waitUntilDone?: boolean, pollInterval?: number, idempotencyKey?: string): Promise; + crawlUrl(url: string, params?: CrawlParams, pollInterval?: number, idempotencyKey?: string): Promise; + asyncCrawlUrl(url: string, params?: CrawlParams, idempotencyKey?: string): Promise; /** * Checks the status of a crawl job using the Firecrawl API. - * @param {string} jobId - The job ID of the crawl operation. - * @returns {Promise} The response containing the job status. + * @param id - The ID of the crawl operation. + * @returns The response containing the job status. */ - checkCrawlStatus(jobId: string): Promise; + checkCrawlStatus(id?: string): Promise; + crawlUrlAndWatch(url: string, params?: CrawlParams, idempotencyKey?: string): Promise; + mapUrl(url: string, params?: MapParams): Promise; /** * Prepares the headers for an API request. - * @returns {AxiosRequestHeaders} The prepared headers. + * @param idempotencyKey - Optional key to ensure idempotency. + * @returns The prepared headers. */ prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders; /** * Sends a POST request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {Params} data - The data to send in the request. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the POST request. + * @param url - The URL to send the request to. + * @param data - The data to send in the request. + * @param headers - The headers for the request. + * @returns The response from the POST request. */ - postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise; + postRequest(url: string, data: any, headers: AxiosRequestHeaders): Promise; /** * Sends a GET request to the specified URL. - * @param {string} url - The URL to send the request to. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @returns {Promise} The response from the GET request. + * @param url - The URL to send the request to. + * @param headers - The headers for the request. + * @returns The response from the GET request. */ getRequest(url: string, headers: AxiosRequestHeaders): Promise; /** * Monitors the status of a crawl job until completion or failure. - * @param {string} jobId - The job ID of the crawl operation. - * @param {AxiosRequestHeaders} headers - The headers for the request. - * @param {number} timeout - Timeout in seconds for job status checks. - * @returns {Promise} The final job status or data. + * @param id - The ID of the crawl operation. + * @param headers - The headers for the request. + * @param checkInterval - Interval in seconds for job status checks. + * @param checkUrl - Optional URL to check the status (used for v1 API) + * @returns The final job status or data. */ - monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise; + monitorJobStatus(id: string, headers: AxiosRequestHeaders, checkInterval: number): Promise; /** * Handles errors from API responses. * @param {AxiosResponse} response - The response from the API. 
@@ -191,3 +238,23 @@ export default class FirecrawlApp { */ handleError(response: AxiosResponse, action: string): void; } +interface CrawlWatcherEvents { + document: CustomEvent; + done: CustomEvent<{ + status: CrawlStatusResponse["status"]; + data: FirecrawlDocument[]; + }>; + error: CustomEvent<{ + status: CrawlStatusResponse["status"]; + data: FirecrawlDocument[]; + error: string; + }>; +} +export declare class CrawlWatcher extends TypedEventTarget { + private ws; + data: FirecrawlDocument[]; + status: CrawlStatusResponse["status"]; + constructor(id: string, app: FirecrawlApp); + close(): void; +} +export {}; diff --git a/apps/js-sdk/package-lock.json b/apps/js-sdk/package-lock.json index ca337062..95dd7d27 100644 --- a/apps/js-sdk/package-lock.json +++ b/apps/js-sdk/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "ISC", "dependencies": { - "@mendable/firecrawl-js": "^0.0.19", + "@mendable/firecrawl-js": "^0.0.36", "axios": "^1.6.8", "ts-node": "^10.9.2", "typescript": "^5.4.5", @@ -422,15 +422,29 @@ } }, "node_modules/@mendable/firecrawl-js": { - "version": "0.0.19", - "resolved": "https://registry.npmjs.org/@mendable/firecrawl-js/-/firecrawl-js-0.0.19.tgz", - "integrity": "sha512-u9BDVIN/bftDztxLlE2cf02Nz0si3+Vmy9cANDFHj/iriT3guzI8ITBk4uC81CyRmPzNyXrW6hSAG90g9ol4cA==", + "version": "0.0.36", + "resolved": "https://registry.npmjs.org/@mendable/firecrawl-js/-/firecrawl-js-0.0.36.tgz", + "integrity": "sha512-5zQMWUD49r6Q7cxj+QBthQ964Bm9fMooW4E8E4nIca3BMXCeEuQFVf5C3OEWwZf0SjJvR+5Yx2wUbXJWd1wCOA==", "dependencies": { "axios": "^1.6.8", + "dotenv": "^16.4.5", + "uuid": "^9.0.1", "zod": "^3.23.8", "zod-to-json-schema": "^3.23.0" } }, + "node_modules/@mendable/firecrawl-js/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/@tsconfig/node10": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", @@ -531,6 +545,17 @@ "node": ">=0.3.1" } }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/esbuild": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", diff --git a/apps/js-sdk/package.json b/apps/js-sdk/package.json index 2d2c36e8..b5d919f4 100644 --- a/apps/js-sdk/package.json +++ b/apps/js-sdk/package.json @@ -11,7 +11,7 @@ "author": "", "license": "ISC", "dependencies": { - "@mendable/firecrawl-js": "^0.0.19", + "@mendable/firecrawl-js": "^1.0.3", "axios": "^1.6.8", "ts-node": "^10.9.2", "typescript": "^5.4.5", diff --git a/apps/js-sdk/test.ts b/apps/js-sdk/test.ts deleted file mode 100644 index 5419c2d5..00000000 --- a/apps/js-sdk/test.ts +++ /dev/null @@ -1,28 +0,0 @@ -import FirecrawlApp from "@mendable/firecrawl-js"; -import { z } from "zod"; - -async function a() { - const app = new FirecrawlApp({ - apiKey: "fc-YOUR_API_KEY", - }); - - // Define schema to extract contents into - const schema = z.object({ - top: z - .array( - z.object({ - title: z.string(), - points: 
z.number(), - by: z.string(), - commentsURL: z.string(), - }) - ) - .length(5) - .describe("Top 5 stories on Hacker News"), - }); - const scrapeResult = await app.scrapeUrl("https://firecrawl.dev", { - extractorOptions: { extractionSchema: schema }, - }); - console.log(scrapeResult.data["llm_extraction"]); -} -a(); diff --git a/apps/python-sdk/README.md b/apps/python-sdk/README.md index 8505fec6..dcf44b25 100644 --- a/apps/python-sdk/README.md +++ b/apps/python-sdk/README.md @@ -18,23 +18,28 @@ pip install firecrawl-py Here's an example of how to use the SDK: ```python -from firecrawl import FirecrawlApp +from firecrawl.firecrawl import FirecrawlApp -# Initialize the FirecrawlApp with your API key -app = FirecrawlApp(api_key='your_api_key') +app = FirecrawlApp(api_key="fc-YOUR_API_KEY") -# Scrape a single URL -url = 'https://mendable.ai' -scraped_data = app.scrape_url(url) +# Scrape a website: +scrape_status = app.scrape_url( + 'https://firecrawl.dev', + params={'formats': ['markdown', 'html']} +) +print(scrape_status) -# Crawl a website -crawl_url = 'https://mendable.ai' -params = { - 'pageOptions': { - 'onlyMainContent': True - } -} -crawl_result = app.crawl_url(crawl_url, params=params) +# Crawl a website: +crawl_status = app.crawl_url( + 'https://firecrawl.dev', + params={ + 'limit': 100, + 'scrapeOptions': {'formats': ['markdown', 'html']} + }, + poll_interval=30 +) +print(crawl_status) ``` ### Scraping a URL @@ -72,45 +77,77 @@ data = app.scrape_url('https://news.ycombinator.com', { print(data["llm_extraction"]) ``` -### Search for a query - -Used to search the web, get the most relevant results, scrap each page and return the markdown. - -```python -query = 'what is mendable?' -search_result = app.search(query) -``` - ### Crawling a Website To crawl a website, use the `crawl_url` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. -The `wait_until_done` parameter determines whether the method should wait for the crawl job to complete before returning the result. If set to `True`, the method will periodically check the status of the crawl job until it is completed or the specified `timeout` (in seconds) is reached. If set to `False`, the method will return immediately with the job ID, and you can manually check the status of the crawl job using the `check_crawl_status` method. - ```python -crawl_url = 'https://example.com' -params = { - 'crawlerOptions': { - 'excludes': ['blog/*'], - 'includes': [], # leave empty for all pages - 'limit': 1000, - }, - 'pageOptions': { - 'onlyMainContent': True - } -} -crawl_result = app.crawl_url(crawl_url, params=params, wait_until_done=True, timeout=5) +idempotency_key = str(uuid.uuid4()) # optional idempotency key +crawl_result = app.crawl_url('firecrawl.dev', {'excludePaths': ['blog/*']}, 2, idempotency_key) +print(crawl_result) ``` -If `wait_until_done` is set to `True`, the `crawl_url` method will return the crawl result once the job is completed. If the job fails or is stopped, an exception will be raised. +### Crawling a Website Asynchronously + +To crawl a website asynchronously, use the `async_crawl_url` method. It takes the starting URL and optional parameters as arguments.
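For illustration, one way to pair `async_crawl_url` with `check_crawl_status` is a simple polling loop. This is only a sketch: the `'scraping'`/`'completed'` status values and the `data`/`metadata` fields follow the v1 responses exercised by the tests later in this patch, and the polling interval is an arbitrary choice.

```python
import time

# Start the crawl without blocking; only the job metadata is returned.
job = app.async_crawl_url('https://firecrawl.dev', {'limit': 10})

# Poll until the job leaves the in-progress state.
status = app.check_crawl_status(job['id'])
while status['status'] == 'scraping':  # v1 reports 'scraping' while pages are still being fetched
    time.sleep(5)  # arbitrary polling interval
    status = app.check_crawl_status(job['id'])

if status['status'] == 'completed':
    for page in status['data']:
        print(page['metadata']['sourceURL'])
```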
The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. + +```python +crawl_result = app.async_crawl_url('firecrawl.dev', {'excludePaths': ['blog/*']}, "") +print(crawl_result) +``` ### Checking Crawl Status To check the status of a crawl job, use the `check_crawl_status` method. It takes the job ID as a parameter and returns the current status of the crawl job. ```python -job_id = crawl_result['jobId'] -status = app.check_crawl_status(job_id) +id = crawl_result['id'] +status = app.check_crawl_status(id) +``` + +### Map a Website + +Use `map_url` to generate a list of URLs from a website. The `params` argument lets you customize the mapping process, including options to exclude subdomains or to utilize the sitemap. + +```python +# Map a website: +map_result = app.map_url('https://example.com') +print(map_result) +``` + +### Crawl a website with WebSockets + +To crawl a website with WebSockets, use the `crawl_url_and_watch` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. + +```python +# inside an async function... +nest_asyncio.apply() + +# Define event handlers +def on_document(detail): + print("DOC", detail) + +def on_error(detail): + print("ERR", detail['error']) + +def on_done(detail): + print("DONE", detail['status']) + + # Function to start the crawl and watch process +async def start_crawl_and_watch(): + # Initiate the crawl job and get the watcher + watcher = app.crawl_url_and_watch('firecrawl.dev', { 'excludePaths': ['blog/*'], 'limit': 5 }) + + # Add event listeners + watcher.add_event_listener("document", on_document) + watcher.add_event_listener("error", on_error) + watcher.add_event_listener("done", on_done) + + # Start the watcher + await watcher.connect() + +# Run the event loop +await start_crawl_and_watch() ``` ## Error Handling diff --git a/apps/python-sdk/build/lib/firecrawl/__init__.py b/apps/python-sdk/build/lib/firecrawl/__init__.py deleted file mode 100644 index e7f8063d..00000000 --- a/apps/python-sdk/build/lib/firecrawl/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .firecrawl import FirecrawlApp diff --git a/apps/python-sdk/build/lib/firecrawl/firecrawl.py b/apps/python-sdk/build/lib/firecrawl/firecrawl.py deleted file mode 100644 index 3f50c798..00000000 --- a/apps/python-sdk/build/lib/firecrawl/firecrawl.py +++ /dev/null @@ -1,299 +0,0 @@ -""" -FirecrawlApp Module - -This module provides a class `FirecrawlApp` for interacting with the Firecrawl API. -It includes methods to scrape URLs, perform searches, initiate and monitor crawl jobs, -and check the status of these jobs. The module uses requests for HTTP communication -and handles retries for certain HTTP status codes. - -Classes: - - FirecrawlApp: Main class for interacting with the Firecrawl API. -""" - -import os -import time -from typing import Any, Dict, Optional - -import requests - - -class FirecrawlApp: - """ - Initialize the FirecrawlApp instance. - - Args: - api_key (Optional[str]): API key for authenticating with the Firecrawl API. - api_url (Optional[str]): Base URL for the Firecrawl API.
- """ - def __init__(self, api_key: Optional[str] = None, api_url: Optional[str] = None) -> None: - self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY') - if self.api_key is None: - raise ValueError('No API key provided') - self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev') - def scrape_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any: - """ - Scrape the specified URL using the Firecrawl API. - - Args: - url (str): The URL to scrape. - params (Optional[Dict[str, Any]]): Additional parameters for the scrape request. - - Returns: - Any: The scraped data if the request is successful. - - Raises: - Exception: If the scrape request fails. - """ - - headers = { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}' - } - # Prepare the base scrape parameters with the URL - scrape_params = {'url': url} - - # If there are additional params, process them - if params: - # Initialize extractorOptions if present - extractor_options = params.get('extractorOptions', {}) - # Check and convert the extractionSchema if it's a Pydantic model - if 'extractionSchema' in extractor_options: - if hasattr(extractor_options['extractionSchema'], 'schema'): - extractor_options['extractionSchema'] = extractor_options['extractionSchema'].schema() - # Ensure 'mode' is set, defaulting to 'llm-extraction' if not explicitly provided - extractor_options['mode'] = extractor_options.get('mode', 'llm-extraction') - # Update the scrape_params with the processed extractorOptions - scrape_params['extractorOptions'] = extractor_options - - # Include any other params directly at the top level of scrape_params - for key, value in params.items(): - if key != 'extractorOptions': - scrape_params[key] = value - # Make the POST request with the prepared headers and JSON data - response = requests.post( - f'{self.api_url}/v0/scrape', - headers=headers, - json=scrape_params, - ) - if response.status_code == 200: - response = response.json() - if response['success'] and 'data' in response: - return response['data'] - else: - raise Exception(f'Failed to scrape URL. Error: {response["error"]}') - elif response.status_code in [402, 408, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to scrape URL. Status code: {response.status_code}. Error: {error_message}') - else: - raise Exception(f'Failed to scrape URL. Status code: {response.status_code}') - - def search(self, query, params=None): - """ - Perform a search using the Firecrawl API. - - Args: - query (str): The search query. - params (Optional[Dict[str, Any]]): Additional parameters for the search request. - - Returns: - Any: The search results if the request is successful. - - Raises: - Exception: If the search request fails. - """ - headers = { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}' - } - json_data = {'query': query} - if params: - json_data.update(params) - response = requests.post( - f'{self.api_url}/v0/search', - headers=headers, - json=json_data - ) - if response.status_code == 200: - response = response.json() - - if response['success'] and 'data' in response: - return response['data'] - else: - raise Exception(f'Failed to search. Error: {response["error"]}') - - elif response.status_code in [402, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to search. Status code: {response.status_code}. 
Error: {error_message}') - else: - raise Exception(f'Failed to search. Status code: {response.status_code}') - - def crawl_url(self, url, params=None, wait_until_done=True, timeout=2, idempotency_key=None): - """ - Initiate a crawl job for the specified URL using the Firecrawl API. - - Args: - url (str): The URL to crawl. - params (Optional[Dict[str, Any]]): Additional parameters for the crawl request. - wait_until_done (bool): Whether to wait until the crawl job is completed. - timeout (int): Timeout between status checks when waiting for job completion. - idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests. - - Returns: - Any: The crawl job ID or the crawl results if waiting until completion. - - Raises: - Exception: If the crawl job initiation or monitoring fails. - """ - headers = self._prepare_headers(idempotency_key) - json_data = {'url': url} - if params: - json_data.update(params) - response = self._post_request(f'{self.api_url}/v0/crawl', json_data, headers) - if response.status_code == 200: - job_id = response.json().get('jobId') - if wait_until_done: - return self._monitor_job_status(job_id, headers, timeout) - else: - return {'jobId': job_id} - else: - self._handle_error(response, 'start crawl job') - - def check_crawl_status(self, job_id): - """ - Check the status of a crawl job using the Firecrawl API. - - Args: - job_id (str): The ID of the crawl job. - - Returns: - Any: The status of the crawl job. - - Raises: - Exception: If the status check request fails. - """ - headers = self._prepare_headers() - response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers) - if response.status_code == 200: - return response.json() - else: - self._handle_error(response, 'check crawl status') - - def _prepare_headers(self, idempotency_key=None): - """ - Prepare the headers for API requests. - - Args: - idempotency_key (Optional[str]): A unique key to ensure idempotency of requests. - - Returns: - Dict[str, str]: The headers including content type, authorization, and optionally idempotency key. - """ - if idempotency_key: - return { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}', - 'x-idempotency-key': idempotency_key - } - - return { - 'Content-Type': 'application/json', - 'Authorization': f'Bearer {self.api_key}', - } - - def _post_request(self, url, data, headers, retries=3, backoff_factor=0.5): - """ - Make a POST request with retries. - - Args: - url (str): The URL to send the POST request to. - data (Dict[str, Any]): The JSON data to include in the POST request. - headers (Dict[str, str]): The headers to include in the POST request. - retries (int): Number of retries for the request. - backoff_factor (float): Backoff factor for retries. - - Returns: - requests.Response: The response from the POST request. - - Raises: - requests.RequestException: If the request fails after the specified retries. - """ - for attempt in range(retries): - response = requests.post(url, headers=headers, json=data) - if response.status_code == 502: - time.sleep(backoff_factor * (2 ** attempt)) - else: - return response - return response - - def _get_request(self, url, headers, retries=3, backoff_factor=0.5): - """ - Make a GET request with retries. - - Args: - url (str): The URL to send the GET request to. - headers (Dict[str, str]): The headers to include in the GET request. - retries (int): Number of retries for the request. - backoff_factor (float): Backoff factor for retries. 
- - Returns: - requests.Response: The response from the GET request. - - Raises: - requests.RequestException: If the request fails after the specified retries. - """ - for attempt in range(retries): - response = requests.get(url, headers=headers) - if response.status_code == 502: - time.sleep(backoff_factor * (2 ** attempt)) - else: - return response - return response - - def _monitor_job_status(self, job_id, headers, timeout): - """ - Monitor the status of a crawl job until completion. - - Args: - job_id (str): The ID of the crawl job. - headers (Dict[str, str]): The headers to include in the status check requests. - timeout (int): Timeout between status checks. - - Returns: - Any: The crawl results if the job is completed successfully. - - Raises: - Exception: If the job fails or an error occurs during status checks. - """ - while True: - status_response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers) - if status_response.status_code == 200: - status_data = status_response.json() - if status_data['status'] == 'completed': - if 'data' in status_data: - return status_data['data'] - else: - raise Exception('Crawl job completed but no data was returned') - elif status_data['status'] in ['active', 'paused', 'pending', 'queued', 'waiting']: - timeout=max(timeout,2) - time.sleep(timeout) # Wait for the specified timeout before checking again - else: - raise Exception(f'Crawl job failed or was stopped. Status: {status_data["status"]}') - else: - self._handle_error(status_response, 'check crawl status') - - def _handle_error(self, response, action): - """ - Handle errors from API responses. - - Args: - response (requests.Response): The response object from the API request. - action (str): Description of the action that was being performed. - - Raises: - Exception: An exception with a message containing the status code and error details from the response. - """ - if response.status_code in [402, 408, 409, 500]: - error_message = response.json().get('error', 'Unknown error occurred') - raise Exception(f'Failed to {action}. Status code: {response.status_code}. Error: {error_message}') - else: - raise Exception(f'Unexpected error occurred while trying to {action}. 
Status code: {response.status_code}') diff --git a/apps/python-sdk/dist/firecrawl-py-0.0.12.tar.gz b/apps/python-sdk/dist/firecrawl-py-0.0.12.tar.gz deleted file mode 100644 index 83cd7221..00000000 Binary files a/apps/python-sdk/dist/firecrawl-py-0.0.12.tar.gz and /dev/null differ diff --git a/apps/python-sdk/dist/firecrawl_py-0.0.12-py3-none-any.whl b/apps/python-sdk/dist/firecrawl_py-0.0.12-py3-none-any.whl deleted file mode 100644 index b96c8f48..00000000 Binary files a/apps/python-sdk/dist/firecrawl_py-0.0.12-py3-none-any.whl and /dev/null differ diff --git a/apps/python-sdk/example.py b/apps/python-sdk/example.py index d80fa795..02c06288 100644 --- a/apps/python-sdk/example.py +++ b/apps/python-sdk/example.py @@ -1,7 +1,9 @@ +import time +import nest_asyncio import uuid from firecrawl.firecrawl import FirecrawlApp -app = FirecrawlApp(api_key="fc-YOUR_API_KEY") +app = FirecrawlApp(api_key="fc-") # Scrape a website: scrape_result = app.scrape_url('firecrawl.dev') @@ -9,9 +11,26 @@ print(scrape_result['markdown']) # Crawl a website: idempotency_key = str(uuid.uuid4()) # optional idempotency key -crawl_result = app.crawl_url('mendable.ai', {'crawlerOptions': {'excludes': ['blog/*']}}, True, 2, idempotency_key) +crawl_result = app.crawl_url('firecrawl.dev', {'excludePaths': ['blog/*']}, 2, idempotency_key) print(crawl_result) +# Asynchronous Crawl a website: +async_result = app.async_crawl_url('firecrawl.dev', {'excludePaths': ['blog/*']}, "") +print(async_result) + +crawl_status = app.check_crawl_status(async_result['id']) +print(crawl_status) + +attempts = 15 +while attempts > 0 and crawl_status['status'] != 'completed': + print(crawl_status) + crawl_status = app.check_crawl_status(async_result['id']) + attempts -= 1 + time.sleep(1) + +crawl_status = app.check_crawl_status(async_result['id']) +print(crawl_status) + # LLM Extraction: # Define schema to extract contents into using pydantic from pydantic import BaseModel, Field @@ -27,18 +46,15 @@ class TopArticlesSchema(BaseModel): top: List[ArticleSchema] = Field(..., max_items=5, description="Top 5 stories") llm_extraction_result = app.scrape_url('https://news.ycombinator.com', { - 'extractorOptions': { - 'extractionSchema': TopArticlesSchema.model_json_schema(), - 'mode': 'llm-extraction' - }, - 'pageOptions':{ - 'onlyMainContent': True + 'formats': ['extract'], + 'extract': { + 'schema': TopArticlesSchema.model_json_schema() } }) -print(llm_extraction_result['llm_extraction']) +print(llm_extraction_result['extract']) -# Define schema to extract contents into using json schema +# # Define schema to extract contents into using json schema json_schema = { "type": "object", "properties": { @@ -62,7 +78,10 @@ json_schema = { "required": ["top"] } -llm_extraction_result = app.scrape_url('https://news.ycombinator.com', { +app2 = FirecrawlApp(api_key="fc-", version="v0") + + +llm_extraction_result = app2.scrape_url('https://news.ycombinator.com', { 'extractorOptions': { 'extractionSchema': json_schema, 'mode': 'llm-extraction' @@ -72,4 +91,36 @@ llm_extraction_result = app.scrape_url('https://news.ycombinator.com', { } }) -print(llm_extraction_result['llm_extraction']) \ No newline at end of file +# print(llm_extraction_result['llm_extraction']) + + +# Map a website: +map_result = app.map_url('https://firecrawl.dev', { 'search': 'blog' }) +print(map_result) + +# Crawl a website with WebSockets: +# inside an async function...
+nest_asyncio.apply() + +# Define event handlers +def on_document(detail): + print("DOC", detail) + +def on_error(detail): + print("ERR", detail['error']) + +def on_done(detail): + print("DONE", detail['status']) + + # Function to start the crawl and watch process +async def start_crawl_and_watch(): + # Initiate the crawl job and get the watcher + watcher = app.crawl_url_and_watch('firecrawl.dev', { 'excludePaths': ['blog/*'], 'limit': 5 }) + + # Add event listeners + watcher.add_event_listener("document", on_document) + watcher.add_event_listener("error", on_error) + watcher.add_event_listener("done", on_done) + + # Start the watcher + await watcher.connect() diff --git a/apps/python-sdk/firecrawl/__init__.py b/apps/python-sdk/firecrawl/__init__.py index fbb2bdbf..4b3807be 100644 --- a/apps/python-sdk/firecrawl/__init__.py +++ b/apps/python-sdk/firecrawl/__init__.py @@ -13,7 +13,7 @@ import os from .firecrawl import FirecrawlApp -__version__ = "0.0.16" +__version__ = "1.2.1" # Define the logger for the Firecrawl project logger: logging.Logger = logging.getLogger("firecrawl") diff --git a/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py b/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py index 452d4982..8945d74d 100644 --- a/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py +++ b/apps/python-sdk/firecrawl/__tests__/e2e_withAuth/test.py @@ -7,7 +7,7 @@ from dotenv import load_dotenv load_dotenv() -API_URL = "http://127.0.0.1:3002"; +API_URL = "http://127.0.0.1:3002" ABSOLUTE_FIRECRAWL_PATH = "firecrawl/firecrawl.py" TEST_API_KEY = os.getenv('TEST_API_KEY') @@ -20,32 +20,34 @@ FirecrawlApp = firecrawl.FirecrawlApp def test_no_api_key(): with pytest.raises(Exception) as excinfo: - invalid_app = FirecrawlApp(api_url=API_URL) + invalid_app = FirecrawlApp(api_url=API_URL, version='v0') assert "No API key provided" in str(excinfo.value) def test_scrape_url_invalid_api_key(): - invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") + invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key", version='v0') with pytest.raises(Exception) as excinfo: invalid_app.scrape_url('https://firecrawl.dev') assert "Unexpected error during scrape URL: Status code 401. Unauthorized: Invalid token" in str(excinfo.value) def test_blocklisted_url(): blocklisted_url = "https://facebook.com/fake-test" - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') with pytest.raises(Exception) as excinfo: app.scrape_url(blocklisted_url) assert "Unexpected error during scrape URL: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." 
in str(excinfo.value) def test_successful_response_with_valid_preview_token(): - app = FirecrawlApp(api_url=API_URL, api_key="this_is_just_a_preview_token") + app = FirecrawlApp(api_url=API_URL, api_key="this_is_just_a_preview_token", version='v0') response = app.scrape_url('https://roastmywebsite.ai') assert response is not None assert 'content' in response assert "_Roast_" in response['content'] def test_scrape_url_e2e(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') response = app.scrape_url('https://roastmywebsite.ai') + print(response) + assert response is not None assert 'content' in response assert 'markdown' in response @@ -54,7 +56,7 @@ def test_scrape_url_e2e(): assert "_Roast_" in response['content'] def test_successful_response_with_valid_api_key_and_include_html(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') response = app.scrape_url('https://roastmywebsite.ai', {'pageOptions': {'includeHtml': True}}) assert response is not None assert 'content' in response @@ -66,7 +68,7 @@ def test_successful_response_with_valid_api_key_and_include_html(): assert " 0 @@ -104,7 +106,7 @@ def test_crawl_url_wait_for_completion_e2e(): assert "_Roast_" in response[0]['content'] def test_crawl_url_with_idempotency_key_e2e(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') uniqueIdempotencyKey = str(uuid4()) response = app.crawl_url('https://roastmywebsite.ai', {'crawlerOptions': {'excludes': ['blog/*']}}, True, 2, uniqueIdempotencyKey) assert response is not None @@ -117,7 +119,7 @@ def test_crawl_url_with_idempotency_key_e2e(): assert "Conflict: Failed to start crawl job due to a conflict. Idempotency key already used" in str(excinfo.value) def test_check_crawl_status_e2e(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') response = app.crawl_url('https://firecrawl.dev', {'crawlerOptions': {'excludes': ['blog/*']}}, False) assert response is not None assert 'jobId' in response @@ -131,21 +133,21 @@ def test_check_crawl_status_e2e(): assert len(status_response['data']) > 0 def test_search_e2e(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') response = app.search("test query") assert response is not None assert 'content' in response[0] assert len(response) > 2 def test_search_invalid_api_key(): - invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") + invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key", version='v0') with pytest.raises(Exception) as excinfo: invalid_app.search("test query") assert "Unexpected error during search: Status code 401. 
Unauthorized: Invalid token" in str(excinfo.value) def test_llm_extraction(): - app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) - response = app.scrape_url("https://mendable.ai", { + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0') + response = app.scrape_url("https://firecrawl.dev", { 'extractorOptions': { 'mode': 'llm-extraction', 'extractionPrompt': "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", diff --git a/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/.env.example b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/.env.example new file mode 100644 index 00000000..904887bf --- /dev/null +++ b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/.env.example @@ -0,0 +1,3 @@ +API_URL=http://localhost:3002 +ABSOLUTE_FIRECRAWL_PATH=/Users/user/firecrawl/apps/python-sdk/firecrawl/firecrawl.py +TEST_API_KEY=fc-YOUR_API_KEY \ No newline at end of file diff --git a/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/__init__.py b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc new file mode 100644 index 00000000..5ba1f132 Binary files /dev/null and b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/__pycache__/test.cpython-311-pytest-8.2.1.pyc differ diff --git a/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/test.py b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/test.py new file mode 100644 index 00000000..12fa10ce --- /dev/null +++ b/apps/python-sdk/firecrawl/__tests__/v1/e2e_withAuth/test.py @@ -0,0 +1,352 @@ +import importlib.util +import pytest +import time +import os +from uuid import uuid4 +from dotenv import load_dotenv +from datetime import datetime + +load_dotenv() + +API_URL = "http://127.0.0.1:3002"; +ABSOLUTE_FIRECRAWL_PATH = "firecrawl/firecrawl.py" +TEST_API_KEY = os.getenv('TEST_API_KEY') + +print(f"ABSOLUTE_FIRECRAWL_PATH: {ABSOLUTE_FIRECRAWL_PATH}") + +spec = importlib.util.spec_from_file_location("FirecrawlApp", ABSOLUTE_FIRECRAWL_PATH) +firecrawl = importlib.util.module_from_spec(spec) +spec.loader.exec_module(firecrawl) +FirecrawlApp = firecrawl.FirecrawlApp + +def test_no_api_key(): + with pytest.raises(Exception) as excinfo: + invalid_app = FirecrawlApp(api_url=API_URL) + assert "No API key provided" in str(excinfo.value) + +def test_scrape_url_invalid_api_key(): + invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") + with pytest.raises(Exception) as excinfo: + invalid_app.scrape_url('https://firecrawl.dev') + assert "Unauthorized: Invalid token" in str(excinfo.value) + +def test_blocklisted_url(): + blocklisted_url = "https://facebook.com/fake-test" + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + with pytest.raises(Exception) as excinfo: + app.scrape_url(blocklisted_url) + assert "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." 
in str(excinfo.value) + +def test_successful_response_with_valid_preview_token(): + app = FirecrawlApp(api_url=API_URL, api_key="this_is_just_a_preview_token") + response = app.scrape_url('https://roastmywebsite.ai') + assert response is not None + assert "_Roast_" in response['markdown'] + assert "content" not in response + assert "html" not in response + assert "metadata" in response + assert "links" not in response + assert "rawHtml" not in response + +def test_successful_response_for_valid_scrape(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + response = app.scrape_url('https://roastmywebsite.ai') + assert response is not None + assert 'markdown' in response + assert "_Roast_" in response['markdown'] + assert 'metadata' in response + assert 'content' not in response + assert 'html' not in response + assert 'rawHtml' not in response + assert 'screenshot' not in response + assert 'links' not in response + +def test_successful_response_with_valid_api_key_and_options(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + params = { + 'formats': ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + 'headers': {'x-key': 'test'}, + 'includeTags': ['h1'], + 'excludeTags': ['h2'], + 'onlyMainContent': True, + 'timeout': 30000, + 'waitFor': 1000 + } + response = app.scrape_url('https://roastmywebsite.ai', params) + assert response is not None + assert 'content' not in response + assert 'markdown' in response + assert 'html' in response + assert 'rawHtml' in response + assert 'screenshot' in response + assert 'links' in response + assert "_Roast_" in response['markdown'] + assert " 0 + assert "https://" in response['links'][0] + assert 'metadata' in response + assert 'title' in response['metadata'] + assert 'description' in response['metadata'] + assert 'keywords' in response['metadata'] + assert 'robots' in response['metadata'] + assert 'ogTitle' in response['metadata'] + assert 'ogDescription' in response['metadata'] + assert 'ogUrl' in response['metadata'] + assert 'ogImage' in response['metadata'] + assert 'ogLocaleAlternate' in response['metadata'] + assert 'ogSiteName' in response['metadata'] + assert 'sourceURL' in response['metadata'] + assert 'statusCode' in response['metadata'] + assert 'pageStatusCode' not in response['metadata'] + assert 'pageError' not in response['metadata'] + assert 'error' not in response['metadata'] + assert response['metadata']['title'] == "Roast My Website" + assert response['metadata']['description'] == "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 🌶️" + assert response['metadata']['keywords'] == "Roast My Website,Roast,Website,GitHub,Firecrawl" + assert response['metadata']['robots'] == "follow, index" + assert response['metadata']['ogTitle'] == "Roast My Website" + assert response['metadata']['ogDescription'] == "Welcome to Roast My Website, the ultimate tool for putting your website through the wringer! This repository harnesses the power of Firecrawl to scrape and capture screenshots of websites, and then unleashes the latest LLM vision models to mercilessly roast them. 
🌶️" + assert response['metadata']['ogUrl'] == "https://www.roastmywebsite.ai" + assert response['metadata']['ogImage'] == "https://www.roastmywebsite.ai/og.png" + assert response['metadata']['ogLocaleAlternate'] == [] + assert response['metadata']['ogSiteName'] == "Roast My Website" + assert response['metadata']['sourceURL'] == "https://roastmywebsite.ai" + assert response['metadata']['statusCode'] == 200 + +def test_successful_response_for_valid_scrape_with_pdf_file(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + response = app.scrape_url('https://arxiv.org/pdf/astro-ph/9301001.pdf') + assert response is not None + assert 'content' not in response + assert 'metadata' in response + assert 'We present spectrophotometric observations of the Broad Line Radio Galaxy' in response['markdown'] + +def test_successful_response_for_valid_scrape_with_pdf_file_without_explicit_extension(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + response = app.scrape_url('https://arxiv.org/pdf/astro-ph/9301001') + time.sleep(1) # wait for 1 second + assert response is not None + assert 'We present spectrophotometric observations of the Broad Line Radio Galaxy' in response['markdown'] + +def test_crawl_url_invalid_api_key(): + invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key") + with pytest.raises(Exception) as excinfo: + invalid_app.crawl_url('https://firecrawl.dev') + assert "Unauthorized: Invalid token" in str(excinfo.value) + +def test_should_return_error_for_blocklisted_url(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + blocklisted_url = "https://twitter.com/fake-test" + with pytest.raises(Exception) as excinfo: + app.crawl_url(blocklisted_url) + assert "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." 
in str(excinfo.value) + +def test_crawl_url_wait_for_completion_e2e(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + response = app.crawl_url('https://roastmywebsite.ai', {'excludePaths': ['blog/*']}, True, 30) + assert response is not None + assert 'total' in response + assert response['total'] > 0 + assert 'creditsUsed' in response + assert response['creditsUsed'] > 0 + assert 'expiresAt' in response + assert datetime.strptime(response['expiresAt'], '%Y-%m-%dT%H:%M:%S.%fZ') > datetime.now() + assert 'status' in response + assert response['status'] == 'completed' + assert 'next' not in response + assert len(response['data']) > 0 + assert 'markdown' in response['data'][0] + assert "_Roast_" in response['data'][0]['markdown'] + assert 'content' not in response['data'][0] + assert 'html' not in response['data'][0] + assert 'rawHtml' not in response['data'][0] + assert 'screenshot' not in response['data'][0] + assert 'links' not in response['data'][0] + assert 'metadata' in response['data'][0] + assert 'title' in response['data'][0]['metadata'] + assert 'description' in response['data'][0]['metadata'] + assert 'language' in response['data'][0]['metadata'] + assert 'sourceURL' in response['data'][0]['metadata'] + assert 'statusCode' in response['data'][0]['metadata'] + assert 'error' not in response['data'][0]['metadata'] + +def test_crawl_url_with_options_and_wait_for_completion(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + response = app.crawl_url('https://roastmywebsite.ai', { + 'excludePaths': ['blog/*'], + 'includePaths': ['/'], + 'maxDepth': 2, + 'ignoreSitemap': True, + 'limit': 10, + 'allowBackwardLinks': True, + 'allowExternalLinks': True, + 'scrapeOptions': { + 'formats': ['markdown', 'html', 'rawHtml', 'screenshot', 'links'], + 'headers': {"x-key": "test"}, + 'includeTags': ['h1'], + 'excludeTags': ['h2'], + 'onlyMainContent': True, + 'waitFor': 1000 + } + }, True, 30) + assert response is not None + assert 'total' in response + assert response['total'] > 0 + assert 'creditsUsed' in response + assert response['creditsUsed'] > 0 + assert 'expiresAt' in response + assert datetime.strptime(response['expiresAt'], '%Y-%m-%dT%H:%M:%S.%fZ') > datetime.now() + assert 'status' in response + assert response['status'] == 'completed' + assert 'next' not in response + assert len(response['data']) > 0 + assert 'markdown' in response['data'][0] + assert "_Roast_" in response['data'][0]['markdown'] + assert 'content' not in response['data'][0] + assert 'html' in response['data'][0] + assert " 0 + assert 'metadata' in response['data'][0] + assert 'title' in response['data'][0]['metadata'] + assert 'description' in response['data'][0]['metadata'] + assert 'language' in response['data'][0]['metadata'] + assert 'sourceURL' in response['data'][0]['metadata'] + assert 'statusCode' in response['data'][0]['metadata'] + assert 'error' not in response['data'][0]['metadata'] + +def test_crawl_url_with_idempotency_key_e2e(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + uniqueIdempotencyKey = str(uuid4()) + response = app.crawl_url('https://roastmywebsite.ai', {'excludePaths': ['blog/*']}, False, 2, uniqueIdempotencyKey) + assert response is not None + assert 'id' in response + + with pytest.raises(Exception) as excinfo: + app.crawl_url('https://firecrawl.dev', {'excludePaths': ['blog/*']}, True, 2, uniqueIdempotencyKey) + assert "Idempotency key already used" in str(excinfo.value) + +def test_check_crawl_status_e2e(): + app = FirecrawlApp(api_url=API_URL, 
api_key=TEST_API_KEY) + response = app.crawl_url('https://firecrawl.dev', {'scrapeOptions': {'formats': ['markdown', 'html', 'rawHtml', 'screenshot', 'links']}}, False) + assert response is not None + assert 'id' in response + + max_checks = 15 + checks = 0 + status_response = app.check_crawl_status(response['id']) + + while status_response['status'] == 'scraping' and checks < max_checks: + time.sleep(1) # wait for 1 second + assert 'partial_data' not in status_response + assert 'current' not in status_response + assert 'data' in status_response + assert 'total' in status_response + assert 'creditsUsed' in status_response + assert 'expiresAt' in status_response + assert 'status' in status_response + assert 'next' in status_response + assert status_response['total'] > 0 + assert status_response['creditsUsed'] > 0 + assert datetime.strptime(status_response['expiresAt'], '%Y-%m-%dT%H:%M:%S.%fZ') > datetime.now() + assert status_response['status'] == 'scraping' + assert '/v1/crawl/' in status_response['next'] + status_response = app.check_crawl_status(response['id']) + checks += 1 + + assert status_response is not None + assert 'total' in status_response + assert status_response['total'] > 0 + assert 'creditsUsed' in status_response + assert status_response['creditsUsed'] > 0 + assert 'expiresAt' in status_response + assert datetime.strptime(status_response['expiresAt'], '%Y-%m-%dT%H:%M:%S.%fZ') > datetime.now() + assert 'status' in status_response + assert status_response['status'] == 'completed' + assert len(status_response['data']) > 0 + assert 'markdown' in status_response['data'][0] + assert len(status_response['data'][0]['markdown']) > 10 + assert 'content' not in status_response['data'][0] + assert 'html' in status_response['data'][0] + assert " 0 + assert 'metadata' in status_response['data'][0] + assert 'title' in status_response['data'][0]['metadata'] + assert 'description' in status_response['data'][0]['metadata'] + assert 'language' in status_response['data'][0]['metadata'] + assert 'sourceURL' in status_response['data'][0]['metadata'] + assert 'statusCode' in status_response['data'][0]['metadata'] + assert 'error' not in status_response['data'][0]['metadata'] + +def test_invalid_api_key_on_map(): + invalid_app = FirecrawlApp(api_key="invalid_api_key", api_url=API_URL) + with pytest.raises(Exception) as excinfo: + invalid_app.map_url('https://roastmywebsite.ai') + assert "Unauthorized: Invalid token" in str(excinfo.value) + +def test_blocklisted_url_on_map(): + app = FirecrawlApp(api_key=TEST_API_KEY, api_url=API_URL) + blocklisted_url = "https://facebook.com/fake-test" + with pytest.raises(Exception) as excinfo: + app.map_url(blocklisted_url) + assert "URL is blocked. Firecrawl currently does not support social media scraping due to policy restrictions." 
in str(excinfo.value) + +def test_successful_response_with_valid_preview_token_on_map(): + app = FirecrawlApp(api_key="this_is_just_a_preview_token", api_url=API_URL) + response = app.map_url('https://roastmywebsite.ai') + assert response is not None + assert len(response) > 0 + +def test_successful_response_for_valid_map(): + app = FirecrawlApp(api_key=TEST_API_KEY, api_url=API_URL) + response = app.map_url('https://roastmywebsite.ai') + assert response is not None + assert len(response) > 0 + assert any("https://" in link for link in response) + filtered_links = [link for link in response if "roastmywebsite.ai" in link] + assert len(filtered_links) > 0 + +def test_search_e2e(): + app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) + with pytest.raises(NotImplementedError) as excinfo: + app.search("test query") + assert "Search is not supported in v1" in str(excinfo.value) + +# def test_llm_extraction(): +# app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY) +# response = app.scrape_url("https://mendable.ai", { +# 'extractorOptions': { +# 'mode': 'llm-extraction', +# 'extractionPrompt': "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source", +# 'extractionSchema': { +# 'type': 'object', +# 'properties': { +# 'company_mission': {'type': 'string'}, +# 'supports_sso': {'type': 'boolean'}, +# 'is_open_source': {'type': 'boolean'} +# }, +# 'required': ['company_mission', 'supports_sso', 'is_open_source'] +# } +# } +# }) +# assert response is not None +# assert 'llm_extraction' in response +# llm_extraction = response['llm_extraction'] +# assert 'company_mission' in llm_extraction +# assert isinstance(llm_extraction['supports_sso'], bool) +# assert isinstance(llm_extraction['is_open_source'], bool) + + + \ No newline at end of file diff --git a/apps/python-sdk/firecrawl/firecrawl.py b/apps/python-sdk/firecrawl/firecrawl.py index 7ec0d33f..75245e8d 100644 --- a/apps/python-sdk/firecrawl/firecrawl.py +++ b/apps/python-sdk/firecrawl/firecrawl.py @@ -12,31 +12,30 @@ Classes: import logging import os import time -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, List +import asyncio +import json import requests +import websockets logger : logging.Logger = logging.getLogger("firecrawl") class FirecrawlApp: - """ - Initialize the FirecrawlApp instance. - - Args: - api_key (Optional[str]): API key for authenticating with the Firecrawl API. - api_url (Optional[str]): Base URL for the Firecrawl API. - """ def __init__(self, api_key: Optional[str] = None, api_url: Optional[str] = None) -> None: - self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY') - if self.api_key is None: - logger.warning("No API key provided") - raise ValueError('No API key provided') - else: - logger.debug("Initialized FirecrawlApp with API key: %s", self.api_key) + """ + Initialize the FirecrawlApp instance with API key, API URL. - self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev') - if self.api_url != 'https://api.firecrawl.dev': - logger.debug("Initialized FirecrawlApp with API URL: %s", self.api_url) + Args: + api_key (Optional[str]): API key for authenticating with the Firecrawl API. + api_url (Optional[str]): Base URL for the Firecrawl API. 
+ """ + self.api_key = api_key or os.getenv('FIRECRAWL_API_KEY') + self.api_url = api_url or os.getenv('FIRECRAWL_API_URL', 'https://api.firecrawl.dev') + if self.api_key is None: + logger.warning("No API key provided") + raise ValueError('No API key provided') + logger.debug(f"Initialized FirecrawlApp with API key: {self.api_key}") def scrape_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any: """ @@ -60,24 +59,22 @@ class FirecrawlApp: # If there are additional params, process them if params: - # Initialize extractorOptions if present - extractor_options = params.get('extractorOptions', {}) - # Check and convert the extractionSchema if it's a Pydantic model - if 'extractionSchema' in extractor_options: - if hasattr(extractor_options['extractionSchema'], 'schema'): - extractor_options['extractionSchema'] = extractor_options['extractionSchema'].schema() - # Ensure 'mode' is set, defaulting to 'llm-extraction' if not explicitly provided - extractor_options['mode'] = extractor_options.get('mode', 'llm-extraction') - # Update the scrape_params with the processed extractorOptions - scrape_params['extractorOptions'] = extractor_options + # Handle extract (for v1) + extract = params.get('extract', {}) + if extract: + if 'schema' in extract and hasattr(extract['schema'], 'schema'): + extract['schema'] = extract['schema'].schema() + scrape_params['extract'] = extract # Include any other params directly at the top level of scrape_params for key, value in params.items(): - if key != 'extractorOptions': + if key not in ['extract']: scrape_params[key] = value + + endpoint = f'/v1/scrape' # Make the POST request with the prepared headers and JSON data response = requests.post( - f'{self.api_url}/v0/scrape', + f'{self.api_url}{endpoint}', headers=headers, json=scrape_params, ) @@ -102,32 +99,14 @@ class FirecrawlApp: Any: The search results if the request is successful. Raises: + NotImplementedError: If the search request is attempted on API version v1. Exception: If the search request fails. """ - headers = self._prepare_headers() - json_data = {'query': query} - if params: - json_data.update(params) - response = requests.post( - f'{self.api_url}/v0/search', - headers=headers, - json=json_data - ) - if response.status_code == 200: - response = response.json() - - if response['success'] and 'data' in response: - return response['data'] - else: - raise Exception(f'Failed to search. Error: {response["error"]}') - - else: - self._handle_error(response, 'search') + raise NotImplementedError("Search is not supported in v1.") def crawl_url(self, url: str, params: Optional[Dict[str, Any]] = None, - wait_until_done: bool = True, - poll_interval: int = 2, + poll_interval: Optional[int] = 2, idempotency_key: Optional[str] = None) -> Any: """ Initiate a crawl job for the specified URL using the Firecrawl API. @@ -135,8 +114,7 @@ class FirecrawlApp: Args: url (str): The URL to crawl. params (Optional[Dict[str, Any]]): Additional parameters for the crawl request. - wait_until_done (bool): Whether to wait until the crawl job is completed. - poll_interval (int): Time in seconds between status checks when waiting for job completion. + poll_interval (Optional[int]): Time in seconds between status checks when waiting for job completion. Defaults to 2 seconds. idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests. Returns: @@ -145,26 +123,49 @@ class FirecrawlApp: Raises: Exception: If the crawl job initiation or monitoring fails. 
""" + endpoint = f'/v1/crawl' headers = self._prepare_headers(idempotency_key) json_data = {'url': url} if params: json_data.update(params) - response = self._post_request(f'{self.api_url}/v0/crawl', json_data, headers) + response = self._post_request(f'{self.api_url}{endpoint}', json_data, headers) if response.status_code == 200: - job_id = response.json().get('jobId') - if wait_until_done: - return self._monitor_job_status(job_id, headers, poll_interval) - else: - return {'jobId': job_id} + id = response.json().get('id') + return self._monitor_job_status(id, headers, poll_interval) + else: self._handle_error(response, 'start crawl job') - def check_crawl_status(self, job_id: str) -> Any: + + def async_crawl_url(self, url: str, params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]: + """ + Initiate a crawl job asynchronously. + + Args: + url (str): The URL to crawl. + params (Optional[Dict[str, Any]]): Additional parameters for the crawl request. + idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests. + + Returns: + Dict[str, Any]: The response from the crawl initiation request. + """ + endpoint = f'/v1/crawl' + headers = self._prepare_headers(idempotency_key) + json_data = {'url': url} + if params: + json_data.update(params) + response = self._post_request(f'{self.api_url}{endpoint}', json_data, headers) + if response.status_code == 200: + return response.json() + else: + self._handle_error(response, 'start crawl job') + + def check_crawl_status(self, id: str) -> Any: """ Check the status of a crawl job using the Firecrawl API. Args: - job_id (str): The ID of the crawl job. + id (str): The ID of the crawl job. Returns: Any: The status of the crawl job. @@ -172,13 +173,79 @@ class FirecrawlApp: Raises: Exception: If the status check request fails. """ + endpoint = f'/v1/crawl/{id}' + headers = self._prepare_headers() - response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers) + response = self._get_request(f'{self.api_url}{endpoint}', headers) if response.status_code == 200: - return response.json() + data = response.json() + return { + 'success': True, + 'status': data.get('status'), + 'total': data.get('total'), + 'completed': data.get('completed'), + 'creditsUsed': data.get('creditsUsed'), + 'expiresAt': data.get('expiresAt'), + 'next': data.get('next'), + 'data': data.get('data'), + 'error': data.get('error') + } else: self._handle_error(response, 'check crawl status') + def crawl_url_and_watch(self, url: str, params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher': + """ + Initiate a crawl job and return a CrawlWatcher to monitor the job via WebSocket. + + Args: + url (str): The URL to crawl. + params (Optional[Dict[str, Any]]): Additional parameters for the crawl request. + idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests. + + Returns: + CrawlWatcher: An instance of CrawlWatcher to monitor the crawl job. + """ + crawl_response = self.async_crawl_url(url, params, idempotency_key) + if crawl_response['success'] and 'id' in crawl_response: + return CrawlWatcher(crawl_response['id'], self) + else: + raise Exception("Crawl job failed to start") + + def map_url(self, url: str, params: Optional[Dict[str, Any]] = None) -> Any: + """ + Perform a map search using the Firecrawl API. + + Args: + url (str): The URL to perform the map search on. 
+ params (Optional[Dict[str, Any]]): Additional parameters for the map search. + + Returns: + Any: The result of the map search, typically a dictionary containing mapping data. + """ + endpoint = f'/v1/map' + headers = self._prepare_headers() + + # Prepare the base scrape parameters with the URL + json_data = {'url': url} + if params: + json_data.update(params) + + # Make the POST request with the prepared headers and JSON data + response = requests.post( + f'{self.api_url}{endpoint}', + headers=headers, + json=json_data, + ) + if response.status_code == 200: + response = response.json() + print(response) + if response['success'] and 'links' in response: + return response['links'] + else: + raise Exception(f'Failed to map URL. Error: {response["error"]}') + else: + self._handle_error(response, 'map') + def _prepare_headers(self, idempotency_key: Optional[str] = None) -> Dict[str, str]: """ Prepare the headers for API requests. @@ -257,15 +324,14 @@ class FirecrawlApp: return response return response - def _monitor_job_status(self, job_id: str, headers: Dict[str, str], poll_interval: int) -> Any: + def _monitor_job_status(self, id: str, headers: Dict[str, str], poll_interval: int) -> Any: """ Monitor the status of a crawl job until completion. Args: - job_id (str): The ID of the crawl job. + id (str): The ID of the crawl job. headers (Dict[str, str]): The headers to include in the status check requests. poll_interval (int): Secounds between status checks. - Returns: Any: The crawl results if the job is completed successfully. @@ -273,15 +339,17 @@ class FirecrawlApp: Exception: If the job fails or an error occurs during status checks. """ while True: - status_response = self._get_request(f'{self.api_url}/v0/crawl/status/{job_id}', headers) + api_url = f'{self.api_url}/v1/crawl/{id}' + + status_response = self._get_request(api_url, headers) if status_response.status_code == 200: status_data = status_response.json() if status_data['status'] == 'completed': if 'data' in status_data: - return status_data['data'] + return status_data else: raise Exception('Crawl job completed but no data was returned') - elif status_data['status'] in ['active', 'paused', 'pending', 'queued', 'waiting']: + elif status_data['status'] in ['active', 'paused', 'pending', 'queued', 'waiting', 'scraping']: poll_interval=max(poll_interval,2) time.sleep(poll_interval) # Wait for the specified interval before checking again else: @@ -300,19 +368,66 @@ class FirecrawlApp: Raises: Exception: An exception with a message containing the status code and error details from the response. """ - error_message = response.json().get('error', 'No additional error details provided.') + error_message = response.json().get('error', 'No error message provided.') + error_details = response.json().get('details', 'No additional error details provided.') if response.status_code == 402: - message = f"Payment Required: Failed to {action}. {error_message}" + message = f"Payment Required: Failed to {action}. {error_message} - {error_details}" elif response.status_code == 408: - message = f"Request Timeout: Failed to {action} as the request timed out. {error_message}" + message = f"Request Timeout: Failed to {action} as the request timed out. {error_message} - {error_details}" elif response.status_code == 409: - message = f"Conflict: Failed to {action} due to a conflict. {error_message}" + message = f"Conflict: Failed to {action} due to a conflict. 
{error_message} - {error_details}" elif response.status_code == 500: - message = f"Internal Server Error: Failed to {action}. {error_message}" + message = f"Internal Server Error: Failed to {action}. {error_message} - {error_details}" else: - message = f"Unexpected error during {action}: Status code {response.status_code}. {error_message}" + message = f"Unexpected error during {action}: Status code {response.status_code}. {error_message} - {error_details}" # Raise an HTTPError with the custom message and attach the response raise requests.exceptions.HTTPError(message, response=response) - \ No newline at end of file + +class CrawlWatcher: + def __init__(self, id: str, app: FirecrawlApp): + self.id = id + self.app = app + self.data: List[Dict[str, Any]] = [] + self.status = "scraping" + self.ws_url = f"{app.api_url.replace('http', 'ws')}/v1/crawl/{id}" + self.event_handlers = { + 'done': [], + 'error': [], + 'document': [] + } + + async def connect(self): + async with websockets.connect(self.ws_url, extra_headers={"Authorization": f"Bearer {self.app.api_key}"}) as websocket: + await self._listen(websocket) + + async def _listen(self, websocket): + async for message in websocket: + msg = json.loads(message) + await self._handle_message(msg) + + def add_event_listener(self, event_type: str, handler): + if event_type in self.event_handlers: + self.event_handlers[event_type].append(handler) + + def dispatch_event(self, event_type: str, detail: Dict[str, Any]): + if event_type in self.event_handlers: + for handler in self.event_handlers[event_type]: + handler(detail) + + async def _handle_message(self, msg: Dict[str, Any]): + if msg['type'] == 'done': + self.status = 'completed' + self.dispatch_event('done', {'status': self.status, 'data': self.data}) + elif msg['type'] == 'error': + self.status = 'failed' + self.dispatch_event('error', {'status': self.status, 'data': self.data, 'error': msg['error']}) + elif msg['type'] == 'catchup': + self.status = msg['data']['status'] + self.data.extend(msg['data'].get('data', [])) + for doc in self.data: + self.dispatch_event('document', doc) + elif msg['type'] == 'document': + self.data.append(msg['data']) + self.dispatch_event('document', msg['data']) \ No newline at end of file diff --git a/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO b/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO deleted file mode 100644 index 288eb7a5..00000000 --- a/apps/python-sdk/firecrawl_py.egg-info/PKG-INFO +++ /dev/null @@ -1,179 +0,0 @@ -Metadata-Version: 2.1 -Name: firecrawl-py -Version: 0.0.12 -Summary: Python SDK for Firecrawl API -Home-page: https://github.com/mendableai/firecrawl -Author: Mendable.ai -Author-email: nick@mendable.ai -License: GNU General Public License v3 (GPLv3) -Project-URL: Documentation, https://docs.firecrawl.dev -Project-URL: Source, https://github.com/mendableai/firecrawl -Project-URL: Tracker, https://github.com/mendableai/firecrawl/issues -Keywords: SDK API firecrawl -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: 
Internet -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: Internet :: WWW/HTTP :: Indexing/Search -Classifier: Topic :: Software Development -Classifier: Topic :: Software Development :: Libraries -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing -Classifier: Topic :: Text Processing :: Indexing -Requires-Python: >=3.8 -Description-Content-Type: text/markdown - -# Firecrawl Python SDK - -The Firecrawl Python SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). It provides a simple and intuitive interface for interacting with the Firecrawl API. - -## Installation - -To install the Firecrawl Python SDK, you can use pip: - -```bash -pip install firecrawl-py -``` - -## Usage - -1. Get an API key from [firecrawl.dev](https://firecrawl.dev) -2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` class. - - -Here's an example of how to use the SDK: - -```python -from firecrawl import FirecrawlApp - -# Initialize the FirecrawlApp with your API key -app = FirecrawlApp(api_key='your_api_key') - -# Scrape a single URL -url = 'https://mendable.ai' -scraped_data = app.scrape_url(url) - -# Crawl a website -crawl_url = 'https://mendable.ai' -params = { - 'pageOptions': { - 'onlyMainContent': True - } -} -crawl_result = app.crawl_url(crawl_url, params=params) -``` - -### Scraping a URL - -To scrape a single URL, use the `scrape_url` method. It takes the URL as a parameter and returns the scraped data as a dictionary. - -```python -url = 'https://example.com' -scraped_data = app.scrape_url(url) -``` -### Extracting structured data from a URL - -With LLM extraction, you can easily extract structured data from any URL. We support pydantic schemas to make it easier for you too. Here is how you to use it: - -```python -class ArticleSchema(BaseModel): - title: str - points: int - by: str - commentsURL: str - -class TopArticlesSchema(BaseModel): - top: List[ArticleSchema] = Field(..., max_items=5, description="Top 5 stories") - -data = app.scrape_url('https://news.ycombinator.com', { - 'extractorOptions': { - 'extractionSchema': TopArticlesSchema.model_json_schema(), - 'mode': 'llm-extraction' - }, - 'pageOptions':{ - 'onlyMainContent': True - } -}) -print(data["llm_extraction"]) -``` - -### Search for a query - -Used to search the web, get the most relevant results, scrap each page and return the markdown. - -```python -query = 'what is mendable?' -search_result = app.search(query) -``` - -### Crawling a Website - -To crawl a website, use the `crawl_url` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. - -The `wait_until_done` parameter determines whether the method should wait for the crawl job to complete before returning the result. If set to `True`, the method will periodically check the status of the crawl job until it is completed or the specified `timeout` (in seconds) is reached. If set to `False`, the method will return immediately with the job ID, and you can manually check the status of the crawl job using the `check_crawl_status` method. 
- -```python -crawl_url = 'https://example.com' -params = { - 'crawlerOptions': { - 'excludes': ['blog/*'], - 'includes': [], # leave empty for all pages - 'limit': 1000, - }, - 'pageOptions': { - 'onlyMainContent': True - } -} -crawl_result = app.crawl_url(crawl_url, params=params, wait_until_done=True, timeout=5) -``` - -If `wait_until_done` is set to `True`, the `crawl_url` method will return the crawl result once the job is completed. If the job fails or is stopped, an exception will be raised. - -### Checking Crawl Status - -To check the status of a crawl job, use the `check_crawl_status` method. It takes the job ID as a parameter and returns the current status of the crawl job. - -```python -job_id = crawl_result['jobId'] -status = app.check_crawl_status(job_id) -``` - -## Error Handling - -The SDK handles errors returned by the Firecrawl API and raises appropriate exceptions. If an error occurs during a request, an exception will be raised with a descriptive error message. - -## Running the Tests with Pytest - -To ensure the functionality of the Firecrawl Python SDK, we have included end-to-end tests using `pytest`. These tests cover various aspects of the SDK, including URL scraping, web searching, and website crawling. - -### Running the Tests - -To run the tests, execute the following commands: - -Install pytest: -```bash -pip install pytest -``` - -Run: -```bash -pytest firecrawl/__tests__/e2e_withAuth/test.py -``` - - -## Contributing - -Contributions to the Firecrawl Python SDK are welcome! If you find any issues or have suggestions for improvements, please open an issue or submit a pull request on the GitHub repository. - -## License - -The Firecrawl Python SDK is open-source and released under the [MIT License](https://opensource.org/licenses/MIT). 
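For context on the firecrawl.py additions earlier in this diff (the v1 map request, the `/v1/crawl/{id}` polling in `_monitor_job_status`, and the WebSocket-backed `CrawlWatcher`), here is a minimal usage sketch. It assumes the map method is exposed on `FirecrawlApp` under some name (the name is truncated out of the hunk; `map_url` below is only a placeholder), that `CrawlWatcher` is importable from the same module, and that a crawl id is already available from a started crawl; the params passed to the map call are purely illustrative.

```python
import asyncio
from firecrawl import FirecrawlApp
# CrawlWatcher is defined alongside FirecrawlApp in firecrawl.py in this diff;
# importing it from firecrawl.firecrawl is an assumption about the package layout.
from firecrawl.firecrawl import CrawlWatcher

app = FirecrawlApp(api_key='your_api_key')

# The map request POSTs {'url': ..., **params} to /v1/map and returns the
# 'links' list on success. 'map_url' is a placeholder name for the method whose
# signature is truncated in the hunk above, and the params are illustrative.
links = app.map_url('https://example.com', params={'limit': 100})
print(links)

# Watch a crawl over the /v1/crawl/{id} WebSocket. The watcher dispatches a
# 'document' event per page and 'done' / 'error' when the crawl finishes.
async def watch(crawl_id: str) -> None:
    watcher = CrawlWatcher(crawl_id, app)
    watcher.add_event_listener('document', lambda doc: print('got document', doc))
    watcher.add_event_listener('done', lambda detail: print('crawl finished:', detail['status']))
    watcher.add_event_listener('error', lambda detail: print('crawl failed:', detail['error']))
    await watcher.connect()

# asyncio.run(watch('some-crawl-id'))  # crawl_id would come from a previously started crawl
```

The 'catchup' branch in `_handle_message` replays documents scraped before the socket was opened, so listeners registered after a crawl has started still receive every page as a 'document' event.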
diff --git a/apps/python-sdk/firecrawl_py.egg-info/SOURCES.txt b/apps/python-sdk/firecrawl_py.egg-info/SOURCES.txt deleted file mode 100644 index c25567c5..00000000 --- a/apps/python-sdk/firecrawl_py.egg-info/SOURCES.txt +++ /dev/null @@ -1,9 +0,0 @@ -README.md -setup.py -firecrawl/__init__.py -firecrawl/firecrawl.py -firecrawl_py.egg-info/PKG-INFO -firecrawl_py.egg-info/SOURCES.txt -firecrawl_py.egg-info/dependency_links.txt -firecrawl_py.egg-info/requires.txt -firecrawl_py.egg-info/top_level.txt \ No newline at end of file diff --git a/apps/python-sdk/firecrawl_py.egg-info/dependency_links.txt b/apps/python-sdk/firecrawl_py.egg-info/dependency_links.txt deleted file mode 100644 index 8b137891..00000000 --- a/apps/python-sdk/firecrawl_py.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/apps/python-sdk/firecrawl_py.egg-info/requires.txt b/apps/python-sdk/firecrawl_py.egg-info/requires.txt deleted file mode 100644 index c8d341f5..00000000 --- a/apps/python-sdk/firecrawl_py.egg-info/requires.txt +++ /dev/null @@ -1,3 +0,0 @@ -requests -pytest -python-dotenv diff --git a/apps/python-sdk/firecrawl_py.egg-info/top_level.txt b/apps/python-sdk/firecrawl_py.egg-info/top_level.txt deleted file mode 100644 index 8bce1a1f..00000000 --- a/apps/python-sdk/firecrawl_py.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -firecrawl diff --git a/apps/python-sdk/pyproject.toml b/apps/python-sdk/pyproject.toml index 0a732c43..969fb051 100644 --- a/apps/python-sdk/pyproject.toml +++ b/apps/python-sdk/pyproject.toml @@ -10,6 +10,10 @@ readme = {file="README.md", content-type = "text/markdown"} requires-python = ">=3.8" dependencies = [ "requests", + "python-dotenv", + "websockets", + "asyncio", +"nest-asyncio" ] authors = [{name = "Mendable.ai",email = "nick@mendable.ai"}] maintainers = [{name = "Mendable.ai",email = "nick@mendable.ai"}] diff --git a/apps/python-sdk/requirements.txt b/apps/python-sdk/requirements.txt index 1bed5881..94971fde 100644 --- a/apps/python-sdk/requirements.txt +++ b/apps/python-sdk/requirements.txt @@ -1,3 +1,6 @@ requests pytest -python-dotenv \ No newline at end of file +python-dotenv +websockets +asyncio +nest-asyncio \ No newline at end of file diff --git a/apps/python-sdk/setup.py b/apps/python-sdk/setup.py index 4978559b..8a67d1fd 100644 --- a/apps/python-sdk/setup.py +++ b/apps/python-sdk/setup.py @@ -30,6 +30,9 @@ setup( 'requests', 'pytest', 'python-dotenv', + 'websockets', + 'asyncio', + 'nest-asyncio' ], python_requires=">=3.8", classifiers=[ diff --git a/apps/redis/.dockerignore b/apps/redis/.dockerignore new file mode 100644 index 00000000..860aa7ad --- /dev/null +++ b/apps/redis/.dockerignore @@ -0,0 +1,2 @@ +.git +fly.toml diff --git a/apps/redis/Dockerfile b/apps/redis/Dockerfile new file mode 100644 index 00000000..77ea66ae --- /dev/null +++ b/apps/redis/Dockerfile @@ -0,0 +1,6 @@ +ARG REDIS_VERSION=7.2.5 +FROM bitnami/redis:${REDIS_VERSION} + +COPY start-redis-server.sh /usr/bin/start-redis-server.sh + +CMD ["/usr/bin/start-redis-server.sh"] diff --git a/apps/redis/Procfile b/apps/redis/Procfile new file mode 100644 index 00000000..8f661345 --- /dev/null +++ b/apps/redis/Procfile @@ -0,0 +1,2 @@ +redis: /usr/bin/start-redis-server.sh +metrics: /usr/local/bin/redis_exporter -redis.addr localhost:6379 -web.listen-address ":9091" diff --git a/apps/redis/README.md b/apps/redis/README.md new file mode 100644 index 00000000..7d2bcabd --- /dev/null +++ b/apps/redis/README.md @@ -0,0 +1,48 @@ +The official repository for Running Redis on Fly.io. 
Find the accompanying Docker image at [flyio/redis](https://hub.docker.com/repository/docker/flyio/redis). + +## Usage + +This installation requires setting a password on Redis. To do that, run `fly secrets set REDIS_PASSWORD=mypassword` before deploying. Keep +track of this password - it won't be visible again after deployment! + +If you need no customizations, you can deploy using the official Docker image. See `fly.toml` in this repository for an example to get started with. +## Runtime requirements + +By default, this Redis installation will only accept connections on the private IPv6 network, on the standard port 6379. + +If you want to access it from the public internet, add a `[[services]]` section to your `fly.toml`. An example is included in this repo for accessing Redis on port 10000. + + +We recommend adding persistent storage for Redis data. If you skip this step, data will be lost across deploys or restarts. For Fly apps, the volume needs to be in the same region as the app instances. For example: + +```cmd +flyctl volumes create redis_server --region ord +``` +```out + Name: redis_server + Region: ord + Size GB: 10 +Created at: 02 Nov 20 19:55 UTC +``` + +To connect this volume to the app, `fly.toml` includes a `[mounts]` entry. + +``` +[mounts] +source = "redis_server" +destination = "/data" +``` + +When the app starts, that volume will be mounted on /data. + +## Cutting a release + +If you have write access to this repo, you can ship a prerelease or full release with: + +``` +scripts/bump_version.sh +``` +or +``` +scripts/bump_version.sh prerel +``` diff --git a/apps/redis/fly.toml b/apps/redis/fly.toml new file mode 100644 index 00000000..1bcd05fb --- /dev/null +++ b/apps/redis/fly.toml @@ -0,0 +1,22 @@ +app = 'firecrawl-dragonfly' +primary_region = 'iad' + +[[mounts]] + source = 'firecrawl_redis' + destination = '/data' + +[[services]] + protocol = 'tcp' + internal_port = 6379 + + [[services.tcp_checks]] + interval = '10s' + timeout = '2s' + +[[vm]] + size = 'performance-4x' + memory = '32gb' + +[[metrics]] + port = 9091 + path = '/metrics' diff --git a/apps/redis/scripts/bump_version.sh b/apps/redis/scripts/bump_version.sh new file mode 100755 index 00000000..4a82c00d --- /dev/null +++ b/apps/redis/scripts/bump_version.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +set -euo pipefail + +ORIGIN=${ORIGIN:-origin} + +bump=${1:-patch} + +prerel=${2:-none} + +if [[ $bump == "prerel" ]]; then + bump="patch" + prerel="prerel" +fi + +if [[ $(git status --porcelain) != "" ]]; then + echo "Error: repo is dirty. Run git status, clean repo and try again." + exit 1 +elif [[ $(git status --porcelain -b | grep -e "ahead" -e "behind") != "" ]]; then + echo "Error: repo has unpushed commits. Push commits to remote and try again." + exit 1 +fi + +BRANCH="$(git rev-parse --abbrev-ref HEAD)" +if [[ "$prerel" == "prerel" && "$BRANCH" != "prerelease" ]]; then +# echo "❌ Sorry, you can only cut a pre-release from the 'prelease' branch" +# echo "Run 'git checkout prerelease && git pull origin prerelease' and try again." +# exit 1 + echo "⚠️ Pre-releases should be cut from the 'prerelease' branch" + echo "Please make sure you're not overwriting someone else's prerelease!" + echo + read -p "Release anyway? " -n 1 -r + echo + if [[ $REPLY =~ ^[^Yy]$ ]]; then + echo Aborting. + exit 1 + fi +fi + +if [[ "$prerel" != "prerel" && "$BRANCH" != "main" ]]; then + echo "❌ Sorry, you can only cut a release from the 'main' branch" + echo "Run 'git checkout main && git pull origin main' and try again." 
+ exit 1 +fi + +git fetch +if [[ "$(git rev-parse HEAD 2>&1)" != "$(git rev-parse '@{u}' 2>&1)" ]]; then + echo "There are upstream commits that won't be included in this release." + echo "You probably want to exit, run 'git pull', then release." + echo + read -p "Release anyway? " -n 1 -r + echo + if [[ $REPLY =~ ^[^Yy]$ ]]; then + echo Aborting. + exit 1 + fi +fi + +dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +previous_version="$("$dir"/../scripts/version.sh -s)" + +if [[ $prerel == "prerel" ]]; then + prerelversion=$("$dir"/../scripts/semver get prerel "$previous_version") + if [[ $prerelversion == "" ]]; then + new_version=$("$dir"/../scripts/semver bump "$bump" "$previous_version") + new_version=$("$dir"/../scripts/semver bump prerel pre-1 "$new_version") + else + prerel=pre-$((${prerelversion#pre-} + 1)) + new_version=$("$dir"/../scripts/semver bump prerel "$prerel" "$previous_version") + fi +else + prerelversion=$("$dir"/../scripts/semver get prerel "$previous_version") + if [[ $prerelversion == "" ]]; then + new_version=$("$dir"/../scripts/semver bump "$bump" "$previous_version") + else + new_version=${previous_version//-$prerelversion/} + fi +fi + +new_version="v$new_version" + +echo "Bumping version from v${previous_version} to ${new_version}" + +read -p "Are you sure? " -n 1 -r +echo +if [[ $REPLY =~ ^[Yy]$ ]] +then + git tag -m "release ${new_version}" -a "$new_version" && git push "${ORIGIN}" tag "$new_version" + echo "done" +fi diff --git a/apps/redis/scripts/semver b/apps/redis/scripts/semver new file mode 100755 index 00000000..674229e0 --- /dev/null +++ b/apps/redis/scripts/semver @@ -0,0 +1,200 @@ +#!/usr/bin/env bash + +set -o errexit -o nounset -o pipefail + +SEMVER_REGEX="^[vV]?(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(\\-[0-9A-Za-z-]+(\\.[0-9A-Za-z-]+)*)?(\\+[0-9A-Za-z-]+(\\.[0-9A-Za-z-]+)*)?$" + +PROG=semver +PROG_VERSION=2.1.0 + +USAGE="\ +Usage: + $PROG bump (major|minor|patch|release|prerel |build ) + $PROG compare + $PROG get (major|minor|patch|release|prerel|build) + $PROG --help + $PROG --version + +Arguments: + A version must match the following regex pattern: + \"${SEMVER_REGEX}\". + In english, the version must match X.Y.Z(-PRERELEASE)(+BUILD) + where X, Y and Z are positive integers, PRERELEASE is an optional + string composed of alphanumeric characters and hyphens and + BUILD is also an optional string composed of alphanumeric + characters and hyphens. + + See definition. + + String that must be composed of alphanumeric characters and hyphens. + + String that must be composed of alphanumeric characters and hyphens. + +Options: + -v, --version Print the version of this tool. + -h, --help Print this help message. + +Commands: + bump Bump by one of major, minor, patch, prerel, build + or a forced potentially conflicting version. The bumped version is + shown to stdout. + + compare Compare with , output to stdout the + following values: -1 if is newer, 0 if equal, 1 if + older. + + get Extract given part of , where part is one of major, minor, + patch, prerel, build." 
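To make the release flow in `bump_version.sh` concrete, here is a small Python sketch of the tag arithmetic it performs with the semver helper above. It is an illustration of the logic under the script's `pre-N` prerelease convention, not the shell implementation itself; the script additionally prefixes the result with `v` and pushes it as an annotated git tag.

```python
# Minimal sketch of the bump_version.sh flow: a prerelease bump either starts a
# "pre-1" suffix on the next patch (or requested part) or increments an existing
# "pre-N"; a normal release simply drops an existing prerelease suffix.
import re

def bump(version: str, part: str = "patch", prerel: bool = False) -> str:
    m = re.match(r"^(\d+)\.(\d+)\.(\d+)(?:-(.+))?$", version)
    major, minor, patch, pre = int(m[1]), int(m[2]), int(m[3]), m[4]

    def bump_part() -> tuple[int, int, int]:
        return {
            "major": (major + 1, 0, 0),
            "minor": (major, minor + 1, 0),
            "patch": (major, minor, patch + 1),
        }[part]

    if prerel:
        if pre is None:
            # No existing prerelease: bump the requested part, then start at pre-1.
            ma, mi, pa = bump_part()
            return f"{ma}.{mi}.{pa}-pre-1"
        # Existing prerelease: just increment its counter (pre-N -> pre-N+1).
        n = int(pre.removeprefix("pre-"))
        return f"{major}.{minor}.{patch}-pre-{n + 1}"

    if pre is not None:
        # Releasing an existing prerelease drops the suffix without bumping again.
        return f"{major}.{minor}.{patch}"
    ma, mi, pa = bump_part()
    return f"{ma}.{mi}.{pa}"

# Example progression:
#   0.2.3 --prerel--> 0.2.4-pre-1 --prerel--> 0.2.4-pre-2 --release--> 0.2.4
assert bump("0.2.3", prerel=True) == "0.2.4-pre-1"
assert bump("0.2.4-pre-1", prerel=True) == "0.2.4-pre-2"
assert bump("0.2.4-pre-2") == "0.2.4"
```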
+ +function error { + echo -e "$1" >&2 + exit 1 +} + +function usage-help { + error "$USAGE" +} + +function usage-version { + echo -e "${PROG}: $PROG_VERSION" + exit 0 +} + +function validate-version { + local version=$1 + if [[ "$version" =~ $SEMVER_REGEX ]]; then + # if a second argument is passed, store the result in var named by $2 + if [ "$#" -eq "2" ]; then + local major=${BASH_REMATCH[1]} + local minor=${BASH_REMATCH[2]} + local patch=${BASH_REMATCH[3]} + local prere=${BASH_REMATCH[4]} + local build=${BASH_REMATCH[6]} + eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")" + else + echo "$version" + fi + else + error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information." + fi +} + +function compare-version { + validate-version "$1" V + validate-version "$2" V_ + + # MAJOR, MINOR and PATCH should compare numerically + for i in 0 1 2; do + local diff=$((${V[$i]} - ${V_[$i]})) + if [[ $diff -lt 0 ]]; then + echo -1; return 0 + elif [[ $diff -gt 0 ]]; then + echo 1; return 0 + fi + done + + # PREREL should compare with the ASCII order. + if [[ -z "${V[3]}" ]] && [[ -n "${V_[3]}" ]]; then + echo 1; return 0; + elif [[ -n "${V[3]}" ]] && [[ -z "${V_[3]}" ]]; then + echo -1; return 0; + elif [[ -n "${V[3]}" ]] && [[ -n "${V_[3]}" ]]; then + if [[ "${V[3]}" > "${V_[3]}" ]]; then + echo 1; return 0; + elif [[ "${V[3]}" < "${V_[3]}" ]]; then + echo -1; return 0; + fi + fi + + echo 0 +} + +function command-bump { + local new; local version; local sub_version; local command; + + case $# in + 2) case $1 in + major|minor|patch|release) command=$1; version=$2;; + *) usage-help;; + esac ;; + 3) case $1 in + prerel|build) command=$1; sub_version=$2 version=$3 ;; + *) usage-help;; + esac ;; + *) usage-help;; + esac + + validate-version "$version" parts + # shellcheck disable=SC2154 + local major="${parts[0]}" + local minor="${parts[1]}" + local patch="${parts[2]}" + local prere="${parts[3]}" + local build="${parts[4]}" + + case "$command" in + major) new="$((major + 1)).0.0";; + minor) new="${major}.$((minor + 1)).0";; + patch) new="${major}.${minor}.$((patch + 1))";; + release) new="${major}.${minor}.${patch}";; + prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");; + build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");; + *) usage-help ;; + esac + + echo "$new" + exit 0 +} + +function command-compare { + local v; local v_; + + case $# in + 2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;; + *) usage-help ;; + esac + + compare-version "$v" "$v_" + exit 0 +} + + +# shellcheck disable=SC2034 +function command-get { + local part version + + if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then + usage-help + exit 0 + fi + + part="$1" + version="$2" + + validate-version "$version" parts + local major="${parts[0]}" + local minor="${parts[1]}" + local patch="${parts[2]}" + local prerel="${parts[3]:1}" + local build="${parts[4]:1}" + + case "$part" in + major|minor|patch|release|prerel|build) echo "${!part}" ;; + *) usage-help ;; + esac + + exit 0 +} + +case $# in + 0) echo "Unknown command: $*"; usage-help;; +esac + +case $1 in + --help|-h) echo -e "$USAGE"; exit 0;; + --version|-v) usage-version ;; + bump) shift; command-bump "$@";; + get) shift; command-get "$@";; + compare) shift; command-compare "$@";; + *) echo "Unknown arguments: $*"; usage-help;; +esac diff --git a/apps/redis/scripts/version.sh b/apps/redis/scripts/version.sh new file mode 100755 index 
00000000..0d3d9875 --- /dev/null +++ b/apps/redis/scripts/version.sh @@ -0,0 +1,5 @@ +ORIGIN=${ORIGIN:-origin} + +version=$(git fetch --tags "${ORIGIN}" &>/dev/null | git -c "versionsort.prereleasesuffix=-pre" tag -l --sort=version:refname | grep -v dev | grep -vE '^v2$' | grep -vE '^v1$' | tail -n1 | cut -c 2-) + +echo "$version" diff --git a/apps/redis/start-redis-server.sh b/apps/redis/start-redis-server.sh new file mode 100755 index 00000000..ed252fde --- /dev/null +++ b/apps/redis/start-redis-server.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +set -e + +sysctl vm.overcommit_memory=1 || true +sysctl net.core.somaxconn=1024 || true + +PW_ARG="" +if [[ ! -z "${REDIS_PASSWORD}" ]]; then + PW_ARG="--requirepass $REDIS_PASSWORD" +fi + +# Set maxmemory-policy to 'allkeys-lru' for caching servers that should always evict old keys +: ${MAXMEMORY_POLICY:="volatile-lru"} +: ${APPENDONLY:="no"} +: ${FLY_VM_MEMORY_MB:=512} +if [ "${NOSAVE}" = "" ] ; then + : ${SAVE:="3600 1 300 100 60 10000"} +fi +# Set maxmemory to 10% of available memory +MAXMEMORY=$(($FLY_VM_MEMORY_MB*80/100)) + +mkdir /data/redis + +redis-server $PW_ARG \ + --dir /data/redis \ + --maxmemory "${MAXMEMORY}mb" \ + --maxmemory-policy $MAXMEMORY_POLICY \ + --appendonly $APPENDONLY \ + --save "$SAVE" diff --git a/apps/rust-sdk/.gitignore b/apps/rust-sdk/.gitignore new file mode 100644 index 00000000..2f7896d1 --- /dev/null +++ b/apps/rust-sdk/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/apps/rust-sdk/CHANGELOG.md b/apps/rust-sdk/CHANGELOG.md new file mode 100644 index 00000000..8342b9fa --- /dev/null +++ b/apps/rust-sdk/CHANGELOG.md @@ -0,0 +1,7 @@ +## CHANGELOG + +## [0.1] + +### Added + +- [feat] Firecrawl rust sdk. diff --git a/apps/rust-sdk/Cargo.lock b/apps/rust-sdk/Cargo.lock new file mode 100644 index 00000000..c2b71d6d --- /dev/null +++ b/apps/rust-sdk/Cargo.lock @@ -0,0 +1,1999 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81ce3d38065e618af2d7b77e10c5ad9a069859b4be3c2250f674af3840d9c8a5" +dependencies = [ + "memchr", +] + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.3.0", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" + +[[package]] +name = "cc" +version = "1.0.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5208975e568d83b6b05cc0a063c8e7e9acc2b43bee6da15616a5b73e109d7437" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clippy" +version = "0.0.302" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d911ee15579a3f50880d8c1d59ef6e79f9533127a3bd342462f5d584f5e8c294" +dependencies = [ + "term 0.5.2", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "dirs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" +dependencies = [ + "libc", + "redox_users", + "winapi 0.3.9", +] + +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_logger" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" +dependencies = [ + "log 0.3.9", + "regex", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "extprim" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b1a357c911c352439b460d7b375b5c85977b9db395b703dfee5a94dfb4d66a2" +dependencies = [ + "num-traits", + "rand", + "rustc_version", + "semver", + "serde", +] + +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "firecrawl" +version = "0.1.0" +dependencies = [ + "assert_matches", + "clippy", + "dotenv", + "log 0.4.22", + "reqwest", + "rustfmt", + "serde", + "serde_json", + "thiserror", + "tokio", + "uuid", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-core", + "futures-io", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] 
+ +[[package]] +name = "getopts" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + +[[package]] +name = "h2" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg 1.3.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" +dependencies = [ + "log 0.4.22", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + 
+[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.48.0", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log 0.4.22", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg 1.3.0", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" 
+dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.2", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha", + "rand_core 0.4.2", + "rand_hc", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg", + "rand_xorshift", + "winapi 0.3.9", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi 0.3.9", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi 0.3.9", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_syscall" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom 0.1.16", + "redox_syscall 0.1.57", + "rust-argon2", +] + +[[package]] +name = "regex" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9329abc99e39129fcceabd24cf5d85b4671ef7c29c50e972bc5afe32438ec384" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", + "utf8-ranges", +] + +[[package]] +name = "regex-syntax" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d707a4fa2637f2dca2ef9fd02225ec7661fe01a53623c1e6515b6916511f7a7" +dependencies = [ + "ucd-util", +] + +[[package]] +name = "reqwest" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log 0.4.22", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + 
"cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64 0.13.1", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver", +] + +[[package]] +name = "rustfmt" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec940eed814db0fb7ab928c5f5025f97dc55d1c0e345e39dda2ce9f945557500" +dependencies = [ + "diff", + "env_logger", + "getopts", + "kernel32-sys", + "libc", + "log 0.3.9", + "regex", + "serde", + "serde_derive", + "serde_json", + "strings", + "syntex_errors", + "syntex_syntax", + "term 0.4.6", + "toml", + "unicode-segmentation", + "winapi 0.2.8", +] + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.23.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + +[[package]] +name = "rustls-webpki" +version = "0.102.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a6fccd794a42c2c105b513a2f62bc3fd8f3ba57a4593677ceb0bd035164d78" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +dependencies = [ + "bitflags 
2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "serde" +version = "1.0.204" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.204" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.120" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg 1.3.0", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "strings" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa481ee1bc42fc3df8195f91f7cb43cf8f2b71b48bac40bf5381cfaf7e481f3c" +dependencies = [ + "log 0.3.9", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"201fcda3845c23e8212cd466bfebf0bd20694490fc0356ae8e428e0824a915a6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + +[[package]] +name = "syntex_errors" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3133289179676c9f5c5b2845bf5a2e127769f4889fcbada43035ef6bd662605e" +dependencies = [ + "libc", + "serde", + "serde_derive", + "syntex_pos", + "term 0.4.6", + "unicode-xid", +] + +[[package]] +name = "syntex_pos" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ab669fa003d208c681f874bbc76d91cc3d32550d16b5d9d2087cf477316470" +dependencies = [ + "serde", + "serde_derive", +] + +[[package]] +name = "syntex_syntax" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03815b9f04d95828770d9c974aa39c6e1f6ef3114eb77a3ce09008a0d15dd142" +dependencies = [ + "bitflags 0.9.1", + "extprim", + "log 0.3.9", + "serde", + "serde_derive", + "serde_json", + "syntex_errors", + "syntex_pos", + "unicode-xid", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "term" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" +dependencies = [ + "kernel32-sys", + "winapi 0.2.8", +] + +[[package]] +name = "term" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" +dependencies = [ + "byteorder", + "dirs", + "winapi 0.3.9", +] + +[[package]] +name = "thiserror" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tinyvec" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ce6b6a2fb3a985e99cebfaefa9faa3024743da73304ca1c683a36429613d3d22" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" +dependencies = [ + "serde", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "ucd-util" +version = "0.1.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "abd2fc5d32b590614af8b0a20d837f32eca055edd0bbead59a9cfe80858be003" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8-ranges" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcfc827f90e53a02eaef5e535ee14266c1d569214c6aa70133a624d8a3164ba" + +[[package]] +name = "uuid" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +dependencies = [ + "getrandom 0.2.15", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log 0.4.22", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + 
"windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + 
+[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/apps/rust-sdk/Cargo.toml b/apps/rust-sdk/Cargo.toml new file mode 100644 index 00000000..685545e2 --- /dev/null +++ b/apps/rust-sdk/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "firecrawl" +author="Mendable.ai" +version = "0.1.0" +edition = "2021" +license = "GPL-2.0-or-later" +homepage = "https://www.firecrawl.dev/" +repository ="https://github.com/mendableai/firecrawl" +description = "Rust SDK for Firecrawl API." +authors = ["sanix-darker "] + +[lib] +path = "src/lib.rs" +name = "firecrawl" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +reqwest = { version = "^0.12", features = ["json", "blocking"] } +serde = { version = "^1.0", features = ["derive"] } +serde_json = "^1.0" +log = "^0.4" +thiserror = "^1.0" +uuid = { version = "^1.10", features = ["v4"] } +tokio = { version = "^1", features = ["full"] } + +[dev-dependencies] +clippy = "^0.0.302" +rustfmt = "^0.10" +assert_matches = "^1.5" +dotenv = "^0.15" +tokio = { version = "1", features = ["full"] } + +[build-dependencies] +tokio = { version = "1", features = ["full"] } diff --git a/apps/rust-sdk/README.md b/apps/rust-sdk/README.md new file mode 100644 index 00000000..54ad9097 --- /dev/null +++ b/apps/rust-sdk/README.md @@ -0,0 +1,181 @@ +# Firecrawl Rust SDK + +The Firecrawl Rust SDK is a library that allows you to easily scrape and crawl websites, and output the data in a format ready for use with language models (LLMs). It provides a simple and intuitive interface for interacting with the Firecrawl API. + +## Installation + +To install the Firecrawl Rust SDK, add the following to your `Cargo.toml`: + +```toml +[dependencies] +firecrawl = "^0.1" +tokio = { version = "^1", features = ["full"] } +serde = { version = "^1.0", features = ["derive"] } +serde_json = "^1.0" +uuid = { version = "^1.10", features = ["v4"] } + +[build-dependencies] +tokio = { version = "1", features = ["full"] } +``` + +To add it in your codebase. + +## Usage + +1. Get an API key from [firecrawl.dev](https://firecrawl.dev) +2. Set the API key as an environment variable named `FIRECRAWL_API_KEY` or pass it as a parameter to the `FirecrawlApp` struct. + +Here's an example of how to use the SDK in [example.rs](./examples/example.rs): +All below example can start with : +```rust +use firecrawl::FirecrawlApp; + +#[tokio::main] +async fn main() { + // Initialize the FirecrawlApp with the API key + let api_key = ...; + let api_url = ...; + let app = FirecrawlApp::new(api_key, api_url).expect("Failed to initialize FirecrawlApp"); + + // your code here... +} +``` + +### Scraping a URL + +To scrape a single URL, use the `scrape_url` method. It takes the URL as a parameter and returns the scraped data as a `serde_json::Value`. + +```rust +// Example scrape code... 
+let scrape_result = app.scrape_url("https://example.com", None).await; +match scrape_result { + Ok(data) => println!("Scrape Result:\n{}", data["markdown"]), + Err(e) => eprintln!("Scrape failed: {}", e), +} +``` + +### Extracting structured data from a URL + +With LLM extraction, you can easily extract structured data from any URL. The schema is passed as a plain `serde_json::Value` (built here with the `json!` macro), which makes it easy to define. Here is how you use it: + +```rust +let json_schema = json!({ + "type": "object", + "properties": { + "top": { + "type": "array", + "items": { + "type": "object", + "properties": { + "title": {"type": "string"}, + "points": {"type": "number"}, + "by": {"type": "string"}, + "commentsURL": {"type": "string"} + }, + "required": ["title", "points", "by", "commentsURL"] + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News" + } + }, + "required": ["top"] +}); + +let llm_extraction_params = json!({ + "extractorOptions": { + "extractionSchema": json_schema, + "mode": "llm-extraction" + }, + "pageOptions": { + "onlyMainContent": true + } +}); + +// Example scrape code... +let llm_extraction_result = app + .scrape_url("https://news.ycombinator.com", Some(llm_extraction_params)) + .await; +match llm_extraction_result { + Ok(data) => println!("LLM Extraction Result:\n{}", data["llm_extraction"]), + Err(e) => eprintln!("LLM Extraction failed: {}", e), +} +``` + +### Search for a query + +Use the `search` method to search the web, get the most relevant results, scrape each page, and return the markdown. + +```rust +// Example query search code... +let query = "what is mendable?"; +let search_result = app.search(query, None).await; +match search_result { + Ok(data) => println!("Search Result:\n{}", data), + Err(e) => eprintln!("Search failed: {}", e), +} +``` + +### Crawling a Website + +To crawl a website, use the `crawl_url` method. It takes the starting URL and optional parameters as arguments. The `params` argument allows you to specify additional options for the crawl job, such as the maximum number of pages to crawl, allowed domains, and the output format. + +The `wait_until_done` parameter determines whether the method should wait for the crawl job to complete before returning the result. If set to `true`, the method will poll the status of the crawl job every `poll_interval` seconds until it is completed. If set to `false`, the method will return immediately with the job ID, and you can manually check the status of the crawl job using the `check_crawl_status` method. + +```rust +let random_uuid = String::from(Uuid::new_v4()); +let idempotency_key = Some(random_uuid); // optional idempotency key +let crawl_params = json!({ + "crawlerOptions": { + "excludes": ["blog/*"] + } +}); + +// Example crawl code... +let crawl_result = app + .crawl_url("https://example.com", Some(crawl_params), true, 2, idempotency_key) + .await; +match crawl_result { + Ok(data) => println!("Crawl Result:\n{}", data), + Err(e) => eprintln!("Crawl failed: {}", e), +} +``` + +If `wait_until_done` is set to `true`, the `crawl_url` method will return the crawl result once the job is completed. If the job fails or is stopped, an error is returned. + +### Checking Crawl Status + +To check the status of a crawl job, use the `check_crawl_status` method. It takes the job ID as a parameter and returns the current status of the crawl job. 
+ +```rust +let job_id = crawl_result["jobId"].as_str().expect("Job ID not found"); +let status = app.check_crawl_status(job_id).await; +match status { + Ok(data) => println!("Crawl Status:\n{}", data), + Err(e) => eprintln!("Failed to check crawl status: {}", e), +} +``` + +## Error Handling + +The SDK handles errors returned by the Firecrawl API and surfaces them as `FirecrawlError` values. If an error occurs during a request, the method returns an `Err` containing a descriptive error message. + +## Running the Tests with Cargo + +To ensure the functionality of the Firecrawl Rust SDK, we have included end-to-end tests using `cargo`. These tests cover various aspects of the SDK, including URL scraping, web searching, and website crawling. + +### Running the Tests + +To run the tests, execute the following commands: +```bash +$ export $(xargs < ./tests/.env) +$ cargo test --test e2e_with_auth +``` + +## Contributing + +Contributions to the Firecrawl Rust SDK are welcome! If you find any issues or have suggestions for improvements, please open an issue or submit a pull request on the GitHub repository. + +## License + +The Firecrawl Rust SDK is open-source and released under the [AGPL License](https://www.gnu.org/licenses/agpl-3.0.en.html). diff --git a/apps/rust-sdk/examples/example.rs b/apps/rust-sdk/examples/example.rs new file mode 100644 index 00000000..c6b96b78 --- /dev/null +++ b/apps/rust-sdk/examples/example.rs @@ -0,0 +1,82 @@ +use firecrawl::FirecrawlApp; +use serde_json::json; +use uuid::Uuid; + +#[tokio::main] +async fn main() { + // Initialize the FirecrawlApp with the API key + let api_key = Some("fc-YOUR_API_KEY".to_string()); + let api_url = Some("http://0.0.0.0:3002".to_string()); + let app = FirecrawlApp::new(api_key, api_url).expect("Failed to initialize FirecrawlApp"); + + // Scrape a website + let scrape_result = app.scrape_url("https://firecrawl.dev", None).await; + match scrape_result { + Ok(data) => println!("Scrape Result:\n{}", data["markdown"]), + Err(e) => eprintln!("Scrape failed: {}", e), + } + + // Crawl a website + let random_uuid = String::from(Uuid::new_v4()); + let idempotency_key = Some(random_uuid); // optional idempotency key + let crawl_params = json!({ + "crawlerOptions": { + "excludes": ["blog/*"] + } + }); + let crawl_result = app + .crawl_url( + "https://mendable.ai", + Some(crawl_params), + true, + 2, + idempotency_key, + ) + .await; + match crawl_result { + Ok(data) => println!("Crawl Result:\n{}", data), + Err(e) => eprintln!("Crawl failed: {}", e), + } + + // LLM Extraction with a JSON schema + let json_schema = json!({ + "type": "object", + "properties": { + "top": { + "type": "array", + "items": { + "type": "object", + "properties": { + "title": {"type": "string"}, + "points": {"type": "number"}, + "by": {"type": "string"}, + "commentsURL": {"type": "string"} + }, + "required": ["title", "points", "by", "commentsURL"] + }, + "minItems": 5, + "maxItems": 5, + "description": "Top 5 stories on Hacker News" + } + }, + "required": ["top"] + }); + + let llm_extraction_params = json!({ + "extractorOptions": { + "extractionSchema": json_schema, + "mode": "llm-extraction" + }, + "pageOptions": { + "onlyMainContent": true + } + }); + + let llm_extraction_result = app + .scrape_url("https://news.ycombinator.com", Some(llm_extraction_params)) + .await; + match llm_extraction_result { + Ok(data) => println!("LLM Extraction Result:\n{}", data["llm_extraction"]), + Err(e) => eprintln!("LLM Extraction failed: {}", e), + } +} diff --git a/apps/rust-sdk/src/lib.rs 
b/apps/rust-sdk/src/lib.rs new file mode 100644 index 00000000..a2ca75ad --- /dev/null +++ b/apps/rust-sdk/src/lib.rs @@ -0,0 +1,373 @@ +/* +* +* - Structs and Enums: +* FirecrawlError: Custom error enum for handling various errors. +* FirecrawlApp: Main struct for the application, holding API key, URL, and HTTP client. +* +* - Initialization: +* +* FirecrawlApp::new initializes the struct, fetching the API key and URL from environment variables if not provided. +* +* - API Methods: +* scrape_url, search, crawl_url, check_crawl_status: +* Methods for interacting with the Firecrawl API, similar to the Python methods. +* monitor_job_status: Polls the API to monitor the status of a crawl job until completion. +*/ + +use std::env; +use std::thread; +use std::time::Duration; + +use log::debug; +use reqwest::{Client, Response}; +use serde_json::json; +use serde_json::Value; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum FirecrawlError { + #[error("HTTP request failed: {0}")] + HttpRequestFailed(String), + #[error("API key not provided")] + ApiKeyNotProvided, + #[error("Failed to parse response: {0}")] + ResponseParseError(String), + #[error("Crawl job failed or stopped: {0}")] + CrawlJobFailed(String), +} + +#[derive(Clone, Debug)] +pub struct FirecrawlApp { + api_key: String, + api_url: String, + client: Client, +} +// the API version of Firecrawl +const API_VERSION: &str = "/v0"; + +impl FirecrawlApp { + /// Initialize the FirecrawlApp instance. + /// + /// # Arguments: + /// * `api_key` (Optional[str]): API key for authenticating with the Firecrawl API. + /// * `api_url` (Optional[str]): Base URL for the Firecrawl API. + pub fn new(api_key: Option<String>, api_url: Option<String>) -> Result<Self, FirecrawlError> { + let api_key = api_key + .or_else(|| env::var("FIRECRAWL_API_KEY").ok()) + .ok_or(FirecrawlError::ApiKeyNotProvided)?; + let api_url = api_url.unwrap_or_else(|| { + env::var("FIRECRAWL_API_URL") + .unwrap_or_else(|_| "https://api.firecrawl.dev".to_string()) + }); + + debug!("Initialized FirecrawlApp with API key: {}", api_key); + debug!("Initialized FirecrawlApp with API URL: {}", api_url); + + Ok(FirecrawlApp { + api_key, + api_url, + client: Client::new(), + }) + } + + /// Scrape the specified URL using the Firecrawl API. + /// + /// # Arguments: + /// * `url` (str): The URL to scrape. + /// * `params` (Optional[Dict[str, Any]]): Additional parameters for the scrape request. + /// + /// # Returns: + /// * `Any`: The scraped data if the request is successful. + /// + /// # Raises: + /// * `Exception`: If the scrape request fails. 
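+ /// # Example (a minimal sketch; the URL and API key are placeholders and a reachable Firecrawl API is assumed):
+ /// ```no_run
+ /// # use firecrawl::FirecrawlApp;
+ /// # async fn run() -> Result<(), firecrawl::FirecrawlError> {
+ /// let app = FirecrawlApp::new(Some("fc-YOUR_API_KEY".to_string()), None)?;
+ /// // The returned serde_json::Value carries fields such as "content", "markdown", and "metadata".
+ /// let page = app.scrape_url("https://example.com", None).await?;
+ /// println!("{}", page["markdown"]);
+ /// # Ok(())
+ /// # }
+ /// ```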
+ pub async fn scrape_url( + &self, + url: &str, + params: Option<Value>, + ) -> Result<Value, FirecrawlError> { + let headers = self.prepare_headers(None); + let mut scrape_params = json!({"url": url}); + + if let Some(mut params) = params { + if let Some(extractor_options) = params.get_mut("extractorOptions") { + if let Some(extraction_schema) = extractor_options.get_mut("extractionSchema") { + if extraction_schema.is_object() && extraction_schema.get("schema").is_some() { + extractor_options["extractionSchema"] = extraction_schema["schema"].clone(); + } + extractor_options["mode"] = extractor_options + .get("mode") + .cloned() + .unwrap_or_else(|| json!("llm-extraction")); + } + scrape_params["extractorOptions"] = extractor_options.clone(); + } + for (key, value) in params.as_object().unwrap() { + if key != "extractorOptions" { + scrape_params[key] = value.clone(); + } + } + } + + let response = self + .client + .post(&format!("{}{}/scrape", self.api_url, API_VERSION)) + .headers(headers) + .json(&scrape_params) + .send() + .await + .map_err(|e| FirecrawlError::HttpRequestFailed(e.to_string()))?; + + self.handle_response(response, "scrape URL").await + } + + /// Perform a search using the Firecrawl API. + /// + /// # Arguments: + /// * `query` (str): The search query. + /// * `params` (Optional[Dict[str, Any]]): Additional parameters for the search request. + /// + /// # Returns: + /// * `Any`: The search results if the request is successful. + /// + /// # Raises: + /// * `Exception`: If the search request fails. + pub async fn search( + &self, + query: &str, + params: Option<Value>, + ) -> Result<Value, FirecrawlError> { + let headers = self.prepare_headers(None); + let mut json_data = json!({"query": query}); + if let Some(params) = params { + for (key, value) in params.as_object().unwrap() { + json_data[key] = value.clone(); + } + } + + let response = self + .client + .post(&format!("{}{}/search", self.api_url, API_VERSION)) + .headers(headers) + .json(&json_data) + .send() + .await + .map_err(|e| FirecrawlError::HttpRequestFailed(e.to_string()))?; + + self.handle_response(response, "search").await + } + + /// Initiate a crawl job for the specified URL using the Firecrawl API. + /// + /// # Arguments: + /// * `url` (str): The URL to crawl. + /// * `params` (Optional[Dict[str, Any]]): Additional parameters for the crawl request. + /// * `wait_until_done` (bool): Whether to wait until the crawl job is completed. + /// * `poll_interval` (int): Time in seconds between status checks when waiting for job completion. + /// * `idempotency_key` (Optional[str]): A unique uuid key to ensure idempotency of requests. + /// + /// # Returns: + /// * `Any`: The crawl job ID or the crawl results if waiting until completion. + /// + /// # `Raises`: + /// * `Exception`: If the crawl job initiation or monitoring fails. 
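+ /// # Example (a minimal sketch of the fire-and-forget flow; placeholder URL and key, reachable API assumed):
+ /// ```no_run
+ /// # use firecrawl::FirecrawlApp;
+ /// # async fn run() -> Result<(), firecrawl::FirecrawlError> {
+ /// let app = FirecrawlApp::new(Some("fc-YOUR_API_KEY".to_string()), None)?;
+ /// // With wait_until_done set to false, this returns immediately with {"jobId": "..."}.
+ /// let job = app.crawl_url("https://example.com", None, false, 2, None).await?;
+ /// println!("Started crawl job: {}", job["jobId"]);
+ /// # Ok(())
+ /// # }
+ /// ```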
+ pub async fn crawl_url( + &self, + url: &str, + params: Option<Value>, + wait_until_done: bool, + poll_interval: u64, + idempotency_key: Option<String>, + ) -> Result<Value, FirecrawlError> { + let headers = self.prepare_headers(idempotency_key); + let mut json_data = json!({"url": url}); + if let Some(params) = params { + for (key, value) in params.as_object().unwrap() { + json_data[key] = value.clone(); + } + } + + let response = self + .client + .post(&format!("{}{}/crawl", self.api_url, API_VERSION)) + .headers(headers.clone()) + .json(&json_data) + .send() + .await + .map_err(|e| FirecrawlError::HttpRequestFailed(e.to_string()))?; + + let response_json = self.handle_response(response, "start crawl job").await?; + let job_id = response_json["jobId"].as_str().unwrap().to_string(); + + if wait_until_done { + self.monitor_job_status(&job_id, headers, poll_interval) + .await + } else { + Ok(json!({"jobId": job_id})) + } + } + + /// Check the status of a crawl job using the Firecrawl API. + /// + /// # Arguments: + /// * `job_id` (str): The ID of the crawl job. + /// + /// # Returns: + /// * `Any`: The status of the crawl job. + /// + /// # Raises: + /// * `Exception`: If the status check request fails. + pub async fn check_crawl_status(&self, job_id: &str) -> Result<Value, FirecrawlError> { + let headers = self.prepare_headers(None); + let response = self + .client + .get(&format!( + "{}{}/crawl/status/{}", + self.api_url, API_VERSION, job_id + )) + .headers(headers) + .send() + .await + .map_err(|e| FirecrawlError::HttpRequestFailed(e.to_string()))?; + + self.handle_response(response, "check crawl status").await + } + + /// Monitor the status of a crawl job until completion. + /// + /// # Arguments: + /// * `job_id` (str): The ID of the crawl job. + /// * `headers` (Dict[str, str]): The headers to include in the status check requests. + /// * `poll_interval` (int): Seconds between status checks. + /// + /// # Returns: + /// * `Any`: The crawl results if the job is completed successfully. + /// + /// # Raises: + /// Exception: If the job fails or an error occurs during status checks. + async fn monitor_job_status( + &self, + job_id: &str, + headers: reqwest::header::HeaderMap, + poll_interval: u64, + ) -> Result<Value, FirecrawlError> { + loop { + let response = self + .client + .get(&format!( + "{}{}/crawl/status/{}", + self.api_url, API_VERSION, job_id + )) + .headers(headers.clone()) + .send() + .await + .map_err(|e| FirecrawlError::HttpRequestFailed(e.to_string()))?; + + let status_data = self.handle_response(response, "check crawl status").await?; + match status_data["status"].as_str() { + Some("completed") => { + if status_data["data"].is_object() { + return Ok(status_data["data"].clone()); + } else { + return Err(FirecrawlError::CrawlJobFailed( + "Crawl job completed but no data was returned".to_string(), + )); + } + } + Some("active") | Some("paused") | Some("pending") | Some("queued") + | Some("waiting") => { + thread::sleep(Duration::from_secs(poll_interval)); + } + Some(status) => { + return Err(FirecrawlError::CrawlJobFailed(format!( + "Crawl job failed or was stopped. Status: {}", + status + ))); + } + None => { + return Err(FirecrawlError::CrawlJobFailed( + "Unexpected response: no status field".to_string(), + )); + } + } + } + } + + /// Prepare the headers for API requests. + /// + /// # Arguments: + /// `idempotency_key` (Optional[str]): A unique key to ensure idempotency of requests. + /// + /// # Returns: + /// Dict[str, str]: The headers including content type, authorization, and optionally idempotency key. 
+ fn prepare_headers(&self, idempotency_key: Option<String>) -> reqwest::header::HeaderMap { + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert("Content-Type", "application/json".parse().unwrap()); + headers.insert( + "Authorization", + format!("Bearer {}", self.api_key).parse().unwrap(), + ); + if let Some(key) = idempotency_key { + headers.insert("x-idempotency-key", key.parse().unwrap()); + } + headers + } + + /// Handle errors from API responses. + /// + /// # Arguments: + /// * `response` (requests.Response): The response object from the API request. + /// * `action` (str): Description of the action that was being performed. + /// + /// # Raises: + /// Exception: An exception with a message containing the status code and error details from the response. + async fn handle_response( + &self, + response: Response, + action: &str, + ) -> Result<Value, FirecrawlError> { + if response.status().is_success() { + let response_json: Value = response + .json() + .await + .map_err(|e| FirecrawlError::ResponseParseError(e.to_string()))?; + if response_json["success"].as_bool().unwrap_or(false) { + Ok(response_json["data"].clone()) + } else { + Err(FirecrawlError::HttpRequestFailed(format!( + "Failed to {}: {}", + action, response_json["error"] + ))) + } + } else { + let status_code = response.status().as_u16(); + let error_message = response + .json::<Value>() + .await + .unwrap_or_else(|_| json!({"error": "No additional error details provided."})); + let message = match status_code { + 402 => format!( + "Payment Required: Failed to {}. {}", + action, error_message["error"] + ), + 408 => format!( + "Request Timeout: Failed to {} as the request timed out. {}", + action, error_message["error"] + ), + 409 => format!( + "Conflict: Failed to {} due to a conflict. {}", + action, error_message["error"] + ), + 500 => format!( + "Internal Server Error: Failed to {}. {}", + action, error_message["error"] + ), + _ => format!( + "Unexpected error during {}: Status code {}. 
{}", + action, status_code, error_message["error"] + ), + }; + Err(FirecrawlError::HttpRequestFailed(message)) + } + } +} diff --git a/apps/go-sdk/.env.example b/apps/rust-sdk/tests/.env.example similarity index 50% rename from apps/go-sdk/.env.example rename to apps/rust-sdk/tests/.env.example index 772a6243..5aa1cb11 100644 --- a/apps/go-sdk/.env.example +++ b/apps/rust-sdk/tests/.env.example @@ -1,2 +1,2 @@ API_URL=http://localhost:3002 -TEST_API_KEY=fc-YOUR-API-KEY +TEST_API_KEY=fc-YOUR_API_KEY diff --git a/apps/rust-sdk/tests/e2e_with_auth.rs b/apps/rust-sdk/tests/e2e_with_auth.rs new file mode 100644 index 00000000..ac9dc1d3 --- /dev/null +++ b/apps/rust-sdk/tests/e2e_with_auth.rs @@ -0,0 +1,174 @@ +use assert_matches::assert_matches; +use dotenv::dotenv; +use firecrawl::FirecrawlApp; +use serde_json::json; +use std::env; +use std::time::Duration; +use tokio::time::sleep; + +#[tokio::test] +async fn test_no_api_key() { + dotenv().ok(); + let api_url = env::var("API_URL").expect("API_URL environment variable is not set"); + assert_matches!(FirecrawlApp::new(None, Some(api_url)), Err(e) if e.to_string() == "API key not provided"); +} + +#[tokio::test] +async fn test_blocklisted_url() { + dotenv().ok(); + let api_url = env::var("API_URL").unwrap(); + let api_key = env::var("TEST_API_KEY").unwrap(); + let app = FirecrawlApp::new(Some(api_key), Some(api_url)).unwrap(); + let blocklisted_url = "https://facebook.com/fake-test"; + let result = app.scrape_url(blocklisted_url, None).await; + + assert_matches!( + result, + Err(e) if e.to_string().contains("Firecrawl currently does not support social media scraping due to policy restrictions") + ); +} + +#[tokio::test] +async fn test_successful_response_with_valid_preview_token() { + dotenv().ok(); + let api_url = env::var("API_URL").unwrap(); + let app = FirecrawlApp::new( + Some("this_is_just_a_preview_token".to_string()), + Some(api_url), + ) + .unwrap(); + let result = app + .scrape_url("https://roastmywebsite.ai", None) + .await + .unwrap(); + assert!(result.as_object().unwrap().contains_key("content")); + assert!(result["content"].as_str().unwrap().contains("_Roast_")); +} + +#[tokio::test] +async fn test_scrape_url_e2e() { + dotenv().ok(); + let api_url = env::var("API_URL").unwrap(); + let api_key = env::var("TEST_API_KEY").unwrap(); + let app = FirecrawlApp::new(Some(api_key), Some(api_url)).unwrap(); + let result = app + .scrape_url("https://roastmywebsite.ai", None) + .await + .unwrap(); + assert!(result.as_object().unwrap().contains_key("content")); + assert!(result.as_object().unwrap().contains_key("markdown")); + assert!(result.as_object().unwrap().contains_key("metadata")); + assert!(!result.as_object().unwrap().contains_key("html")); + assert!(result["content"].as_str().unwrap().contains("_Roast_")); +} + +#[tokio::test] +async fn test_successful_response_with_valid_api_key_and_include_html() { + dotenv().ok(); + let api_url = env::var("API_URL").unwrap(); + let api_key = env::var("TEST_API_KEY").unwrap(); + let app = FirecrawlApp::new(Some(api_key), Some(api_url)).unwrap(); + let params = json!({ + "pageOptions": { + "includeHtml": true + } + }); + let result = app + .scrape_url("https://roastmywebsite.ai", Some(params)) + .await + .unwrap(); + assert!(result.as_object().unwrap().contains_key("content")); + assert!(result.as_object().unwrap().contains_key("markdown")); + assert!(result.as_object().unwrap().contains_key("html")); + assert!(result.as_object().unwrap().contains_key("metadata")); + 
assert!(result["content"].as_str().unwrap().contains("_Roast_")); + assert!(result["markdown"].as_str().unwrap().contains("_Roast_")); + assert!(result["html"].as_str().unwrap().contains("=14.13.1'} + '@ampproject/remapping@2.3.0': resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} @@ -55,6 +62,220 @@ packages: '@anthropic-ai/sdk@0.24.3': resolution: {integrity: sha512-916wJXO6T6k8R6BAAcLhLPv/pnLGy7YSEBZXZ1XTFbLcTZE8oTy3oDW9WJf9KKZwMvVcePIfoTSvzXHRcGxkQQ==} + '@artilleryio/int-commons@2.10.0': + resolution: {integrity: sha512-CukRix3yxcsbjPTPhIyXN7qZ6f/3W+LQtF96RxuZ7L3P0F7y7t4NswPSll2+zDkAMvvBgFojgPL+bFf2EDIiOA==} + + '@artilleryio/int-core@2.14.0': + resolution: {integrity: sha512-u0q5p5aWE7DRNRnfmj8JzXqNFitoBKQ4/N/Uur4PXcZCPzB7yQWppRPRJqFUh14zmC/UybDMui1EnbDhBqGGIg==} + + '@artilleryio/sketches-js@2.1.1': + resolution: {integrity: sha512-H3D50vDb37E3NGYXY0eUFAm5++moElaqoAu0MWYZhgzaA3IT2E67bRCL8U4LKHuVf/MgDZk14uawIjc4WVjOUQ==} + + '@aws-crypto/sha256-browser@5.2.0': + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + + '@aws-crypto/sha256-js@5.2.0': + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/supports-web-crypto@5.2.0': + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + + '@aws-crypto/util@5.2.0': + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + + '@aws-sdk/client-cloudwatch@3.629.0': + resolution: {integrity: sha512-dMEyyA9EQCLCsZMAMyM2wL/gX99AVxRjhlgap53XkkGi9GgiCer4wLMK+2Nhpu+ncCcQEzEMlVV35YC843T8BQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-cognito-identity@3.629.0': + resolution: {integrity: sha512-bWwp3f8XLCQgXmSWqDMjIb4WHWaEgMEX5D/WLAq7FHMiikdeJgpZIhWcDV05nEW/f9wGS6cxT3ZyXc/PRXMWrQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso-oidc@3.629.0': + resolution: {integrity: sha512-3if0LauNJPqubGYf8vnlkp+B3yAeKRuRNxfNbHlE6l510xWGcKK/ZsEmiFmfePzKKSRrDh/cxMFMScgOrXptNg==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.629.0 + + '@aws-sdk/client-sso@3.629.0': + resolution: {integrity: sha512-2w8xU4O0Grca5HmT2dXZ5fF0g39RxODtmoqHJDsK5DSt750LqDG4w3ktmBvQs3+SrpkkJOjlX5v/hb2PCxVbww==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sts@3.629.0': + resolution: {integrity: sha512-RjOs371YwnSVGxhPjuluJKaxl4gcPYTAky0nPjwBime0i9/iS9nI8R8l5j7k7ec9tpFWjBPvNnThCU07pvjdzw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/core@3.629.0': + resolution: {integrity: sha512-+/ShPU/tyIBM3oY1cnjgNA/tFyHtlWq+wXF9xEKRv19NOpYbWQ+xzNwVjGq8vR07cCRqy/sDQLWPhxjtuV/FiQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-cognito-identity@3.629.0': + resolution: {integrity: sha512-Cdq7Q0OGdze0/GWClJMcyQRfQmZFryxDIHVFP1FVoQPeKucHU5ZFVPEs+U4UOfs7XquG/+NYCX/jTPdI4ATfgQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-env@3.620.1': + resolution: {integrity: sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-http@3.622.0': + resolution: {integrity: sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==} + engines: {node: '>=16.0.0'} + + 
'@aws-sdk/credential-provider-ini@3.629.0': + resolution: {integrity: sha512-r9fI7BABARvVDp77DBUImQzYdvarAIdhbvpCEZib0rlpvfWu3zxE9KZcapCAAi0MPjxeDfb7RMehFQIkAP7mYw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.629.0 + + '@aws-sdk/credential-provider-node@3.629.0': + resolution: {integrity: sha512-868hnVOLlXOBHk91Rl0jZIRgr/M4WJCa0nOrW9A9yidsQxuZp9P0vshDmm4hMvNZadmPIfo0Rra2MpA4RELoCw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-process@3.620.1': + resolution: {integrity: sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-sso@3.629.0': + resolution: {integrity: sha512-Lf4XOuj6jamxgGZGrVojERh5S+NS2t2S4CUOnAu6tJ5U0GPlpjhINUKlcVxJBpsIXudMGW1nkumAd3+kazCPig==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.621.0': + resolution: {integrity: sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.621.0 + + '@aws-sdk/credential-providers@3.630.0': + resolution: {integrity: sha512-fuF/lAZ5pj0DooT8wA1lcrR8vtzBNdTHA5dAmmV930eDoT5XZ+/NeQnclWds40xGj2KDi5YEhuMwOqEZqEQ0Aw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-host-header@3.620.0': + resolution: {integrity: sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-logger@3.609.0': + resolution: {integrity: sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.620.0': + resolution: {integrity: sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-user-agent@3.620.0': + resolution: {integrity: sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/region-config-resolver@3.614.0': + resolution: {integrity: sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/token-providers@3.614.0': + resolution: {integrity: sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.614.0 + + '@aws-sdk/types@3.609.0': + resolution: {integrity: sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-endpoints@3.614.0': + resolution: {integrity: sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-locate-window@3.568.0': + resolution: {integrity: sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-user-agent-browser@3.609.0': + resolution: {integrity: sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==} + + '@aws-sdk/util-user-agent-node@3.614.0': + resolution: {integrity: sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==} + engines: {node: '>=16.0.0'} + peerDependencies: + aws-crt: 
'>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@azure/abort-controller@1.1.0': + resolution: {integrity: sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==} + engines: {node: '>=12.0.0'} + + '@azure/abort-controller@2.1.2': + resolution: {integrity: sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==} + engines: {node: '>=18.0.0'} + + '@azure/arm-containerinstance@9.1.0': + resolution: {integrity: sha512-N9T3/HJwWXvJuz7tin+nO+DYYCTGHILJ5Die3TtdF8Wd1ITfXGqB0vY/wOnspUu/AGojhaIKGmawAfPdw2kX8w==} + engines: {node: '>=14.0.0'} + + '@azure/core-auth@1.7.2': + resolution: {integrity: sha512-Igm/S3fDYmnMq1uKS38Ae1/m37B3zigdlZw+kocwEhh5GjyKjPrXKO2J6rzpC1wAxrNil/jX9BJRqBshyjnF3g==} + engines: {node: '>=18.0.0'} + + '@azure/core-client@1.9.2': + resolution: {integrity: sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==} + engines: {node: '>=18.0.0'} + + '@azure/core-http-compat@2.1.2': + resolution: {integrity: sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==} + engines: {node: '>=18.0.0'} + + '@azure/core-lro@2.7.2': + resolution: {integrity: sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==} + engines: {node: '>=18.0.0'} + + '@azure/core-paging@1.6.2': + resolution: {integrity: sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==} + engines: {node: '>=18.0.0'} + + '@azure/core-rest-pipeline@1.16.3': + resolution: {integrity: sha512-VxLk4AHLyqcHsfKe4MZ6IQ+D+ShuByy+RfStKfSjxJoL3WBWq17VNmrz8aT8etKzqc2nAeIyLxScjpzsS4fz8w==} + engines: {node: '>=18.0.0'} + + '@azure/core-tracing@1.1.2': + resolution: {integrity: sha512-dawW9ifvWAWmUm9/h+/UQ2jrdvjCJ7VJEuCJ6XVNudzcOwm53BFZH4Q845vjfgoUAM8ZxokvVNxNxAITc502YA==} + engines: {node: '>=18.0.0'} + + '@azure/core-util@1.9.2': + resolution: {integrity: sha512-l1Qrqhi4x1aekkV+OlcqsJa4AnAkj5p0JV8omgwjaV9OAbP41lvrMvs+CptfetKkeEaGRGSzby7sjPZEX7+kkQ==} + engines: {node: '>=18.0.0'} + + '@azure/core-xml@1.4.3': + resolution: {integrity: sha512-D6G7FEmDiTctPKuWegX2WTrS1enKZwqYwdKTO6ZN6JMigcCehlT0/CYl+zWpI9vQ9frwwp7GQT3/owaEXgnOsA==} + engines: {node: '>=18.0.0'} + + '@azure/identity@4.4.1': + resolution: {integrity: sha512-DwnG4cKFEM7S3T+9u05NstXU/HN0dk45kPOinUyNKsn5VWwpXd9sbPKEg6kgJzGbm1lMuhx9o31PVbCtM5sfBA==} + engines: {node: '>=18.0.0'} + + '@azure/logger@1.1.4': + resolution: {integrity: sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==} + engines: {node: '>=18.0.0'} + + '@azure/msal-browser@3.21.0': + resolution: {integrity: sha512-BAwcFsVvOrYzKuUZHhFuvRykUmQGq6lDxst2qGnjxnpNZc3d/tnVPcmhgvUdeKl28VSE0ltgBzT3HkdpDtz9rg==} + engines: {node: '>=0.8.0'} + + '@azure/msal-common@14.14.1': + resolution: {integrity: sha512-2Q3tqNz/PZLfSr8BvcHZVpRRfSn4MjGSqjj9J+HlBsmbf1Uu4P0WeXnemjTJwwx9KrmplsrN3UkZ/LPOR720rw==} + engines: {node: '>=0.8.0'} + + '@azure/msal-node@2.13.0': + resolution: {integrity: sha512-DhP97ycs7qlCVzzzWGzJiwAFyFj5okno74E4FUZ61oCLfKh4IxA1kxirqzrWuYZWpBe9HVPL6GA4NvmlEOBN5Q==} + engines: {node: '>=16'} + + '@azure/storage-blob@12.24.0': + resolution: {integrity: sha512-l8cmWM4C7RoNCBOImoFMxhTXe1Lr+8uQ/IgnhRNMpfoA9bAFWoLG4XrWm6O5rKXortreVQuD+fc1hbzWklOZbw==} + engines: {node: '>=18.0.0'} + + '@azure/storage-queue@12.23.0': + resolution: {integrity: 
sha512-koVDpx/lXl3bx6GiyitIsLZ4rtywpTlfwKXiuTDif+dY6PhgSyN9mrq9AsHXaHQnx2CCpmoIzRSV5n4GoQGcmg==} + engines: {node: '>=18.0.0'} + '@babel/code-frame@7.24.2': resolution: {integrity: sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==} engines: {node: '>=6.9.0'} @@ -219,12 +440,60 @@ packages: resolution: {integrity: sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==} engines: {node: '>=6.9.0'} + '@base2/pretty-print-object@1.0.1': + resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==} + '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + '@colors/colors@1.5.0': + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + + '@cspotcode/source-map-support@0.8.1': + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + + '@dependents/detective-less@4.1.0': + resolution: {integrity: sha512-KrkT6qO5NxqNfy68sBl6CTSoJ4SNDIS5iQArkibhlbGU4LaDukZ3q2HIkh8aUKDio6o4itU4xDR7t82Y2eP1Bg==} + engines: {node: '>=14'} + '@dqbd/tiktoken@1.0.15': resolution: {integrity: sha512-a6I67K1xUkuqcuwulobIJiLikkoE7egMaviI1Jg5bxSn2V7QGqXsGE3jTKr8UIOU/o74mAAd5TkeXFNBtaKF4A==} + '@grpc/grpc-js@1.11.1': + resolution: {integrity: sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.13': + resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} + engines: {node: '>=6'} + hasBin: true + + '@hapi/hoek@9.3.0': + resolution: {integrity: sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==} + + '@hapi/topo@5.1.0': + resolution: {integrity: sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==} + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@isaacs/ts-node-temp-fork-for-pr-2009@10.9.7': + resolution: {integrity: sha512-9f0bhUr9TnwwpgUhEpr3FjxSaH/OHaARkE2F9fM0lS4nIs2GNerrvGwQz493dk0JKlTaGYVrKbq36vA/whZ34g==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=4.2' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + '@istanbuljs/load-nyc-config@1.1.0': resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} engines: {node: '>=8'} @@ -317,15 +586,540 @@ packages: '@jridgewell/trace-mapping@0.3.25': resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.9': + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@ngneat/falso@7.2.0': + resolution: {integrity: 
sha512-283EXBFd05kCbGuGSXgmvhCsQYEYzvD/eJaE7lxd05qRB0tgREvZX7TRlJ1KSp8nHxoK6Ws029G1Y30mt4IVAA==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@npmcli/agent@2.2.2': + resolution: {integrity: sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/fs@3.1.1': + resolution: {integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/git@5.0.8': + resolution: {integrity: sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/installed-package-contents@2.1.0': + resolution: {integrity: sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + '@npmcli/node-gyp@3.0.0': + resolution: {integrity: sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/package-json@5.2.0': + resolution: {integrity: sha512-qe/kiqqkW0AGtvBjL8TJKZk/eBBSpnJkUWvHdQ9jM2lKHXRYYJuyNpJPlJw3c8QjC2ow6NZYiLExhUaeJelbxQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/promise-spawn@7.0.2': + resolution: {integrity: sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/redact@1.1.0': + resolution: {integrity: sha512-PfnWuOkQgu7gCbnSsAisaX7hKOdZ4wSAhAzH3/ph5dSGau52kCRrMMGbiSQLwyTZpgldkZ49b0brkOr1AzGBHQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/run-script@7.0.4': + resolution: {integrity: sha512-9ApYM/3+rBt9V80aYg6tZfzj3UWdiYyCt7gJUD1VJKvWF5nwKDSICXbYIQbspFTq6TOpbsEtIC0LArB8d9PFmg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@oclif/core@2.16.0': + resolution: {integrity: sha512-dL6atBH0zCZl1A1IXCKJgLPrM/wR7K+Wi401E/IvqsK8m2iCHW+0TEOGrans/cuN3oTW+uxIyJFHJ8Im0k4qBw==} + engines: {node: '>=14.0.0'} + + '@oclif/plugin-help@5.2.20': + resolution: {integrity: sha512-u+GXX/KAGL9S10LxAwNUaWdzbEBARJ92ogmM7g3gDVud2HioCmvWQCDohNRVZ9GYV9oKwZ/M8xwd6a1d95rEKQ==} + engines: {node: '>=12.0.0'} + + '@oclif/plugin-not-found@2.4.3': + resolution: {integrity: sha512-nIyaR4y692frwh7wIHZ3fb+2L6XEecQwRDIb4zbEam0TvaVmBQWZoColQyWA84ljFBPZ8XWiQyTz+ixSwdRkqg==} + engines: {node: '>=12.0.0'} + + '@opentelemetry/api-logs@0.41.2': + resolution: {integrity: sha512-JEV2RAqijAFdWeT6HddYymfnkiRu2ASxoTBr4WsnGJhOjWZkEy6vp+Sx9ozr1NaIODOa2HUyckExIqQjn6qywQ==} + engines: {node: '>=14'} + + '@opentelemetry/api-logs@0.43.0': + resolution: {integrity: sha512-0CXMOYPXgAdLM2OzVkiUfAL6QQwWVhnMfUXCqLsITY42FZ9TxAhZIHkoc4mfVxvPuXsBnRYGR8UQZX86p87z4A==} + engines: {node: '>=14'} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + 
'@opentelemetry/context-async-hooks@1.25.1': + resolution: {integrity: sha512-UW/ge9zjvAEmRWVapOP0qyCvPulWU6cQxGxDbWEFfGOj1VBBZAuOqTo3X6yWmDTD3Xe15ysCZChHncr2xFMIfQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@1.15.2': + resolution: {integrity: sha512-+gBv15ta96WqkHZaPpcDHiaz0utiiHZVfm2YOYSqFGrUaJpPkMoSuLBB58YFQGi6Rsb9EHos84X6X5+9JspmLw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/core@1.17.0': + resolution: {integrity: sha512-tfnl3h+UefCgx1aeN2xtrmr6BmdWGKXypk0pflQR0urFS40aE88trnkOMc2HTJZbMrqEEl4HsaBeFhwLVXsrJg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/core@1.25.1': + resolution: {integrity: sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-metrics-otlp-grpc@0.41.2': + resolution: {integrity: sha512-gQuCcd5QSMkfi1XIriWAoak/vaRvFzpvtzh2hjziIvbnA3VtoGD3bDb2dzEzOA1iSWO0/tHwnBsSmmUZsETyOA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-http@0.41.2': + resolution: {integrity: sha512-+YeIcL4nuldWE89K8NBLImpXCvih04u1MBnn8EzvoywG2TKR5JC3CZEPepODIxlsfGSgP8W5khCEP1NHZzftYw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-proto@0.41.2': + resolution: {integrity: sha512-OLNs6wF84uhxn8TJ8Bv1q2ltdJqjKA9oUEtICcUDDzXIiztPxZ9ur/4xdMk9T3ZJeFMfrhj8eYDkpETBy+fjCg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-grpc@0.43.0': + resolution: {integrity: sha512-h/oofzwyONMcAeBXD6+E6+foFQg9CPadBFcKAGoMIyVSK7iZgtK5DLEwAF4jz5MhfxWNmwZjHXFRc0GqCRx/tA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-trace-otlp-http@0.41.2': + resolution: {integrity: sha512-Y0fGLipjZXLMelWtlS1/MDtrPxf25oM408KukRdkN31a1MEFo4h/ZkNwS7ZfmqHGUa+4rWRt2bi6JBiqy7Ytgw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-trace-otlp-proto@0.41.2': + resolution: {integrity: sha512-IGZga9IIckqYE3IpRE9FO9G5umabObIrChlXUHYpMJtDgx797dsb3qXCvLeuAwB+HoB8NsEZstlzmLnoa6/HmA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/exporter-zipkin@1.25.1': + resolution: {integrity: sha512-RmOwSvkimg7ETwJbUOPTMhJm9A9bG1U8s7Zo3ajDh4zM7eYcycQ0dM7FbLD6NXWbI2yj7UY4q8BKinKYBQksyw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-exporter-base@0.41.2': + resolution: {integrity: sha512-pfwa6d+Dax3itZcGWiA0AoXeVaCuZbbqUTsCtOysd2re8C2PWXNxDONUfBWsn+KgxAdi+ljwTjJGiaVLDaIEvQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-exporter-base@0.43.0': + resolution: {integrity: sha512-LXNtRFVuPRXB9q0qdvrLikQ3NtT9Jmv255Idryz3RJPhOh/Fa03sBASQoj3D55OH3xazmA90KFHfhJ/d8D8y4A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.41.2': + resolution: {integrity: sha512-OErK8dYjXG01XIMIpmOV2SzL9ctkZ0Nyhf2UumICOAKtgLvR5dG1JMlsNVp8Jn0RzpsKc6Urv7JpP69wzRXN+A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + 
'@opentelemetry/otlp-grpc-exporter-base@0.43.0': + resolution: {integrity: sha512-oOpqtDJo9BBa1+nD6ID1qZ55ZdTwEwSSn2idMobw8jmByJKaanVLdr9SJKsn5T9OBqo/c5QY2brMf0TNZkobJQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-proto-exporter-base@0.41.2': + resolution: {integrity: sha512-BxmEMiP6tHiFroe5/dTt9BsxCci7BTLtF7A6d4DKHLiLweWWZxQ9l7hON7qt/IhpKrQcAFD1OzZ1Gq2ZkNzhCw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/otlp-transformer@0.41.2': + resolution: {integrity: sha512-jJbPwB0tNu2v+Xi0c/v/R3YBLJKLonw1p+v3RVjT2VfzeUyzSp/tBeVdY7RZtL6dzZpA9XSmp8UEfWIFQo33yA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.5.0' + + '@opentelemetry/otlp-transformer@0.43.0': + resolution: {integrity: sha512-KXYmgzWdVBOD5NvPmGW1nEMJjyQ8gK3N8r6pi4HvmEhTp0v4T13qDSax4q0HfsqmbPJR355oqQSJUnu1dHNutw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + + '@opentelemetry/resources@1.15.2': + resolution: {integrity: sha512-xmMRLenT9CXmm5HMbzpZ1hWhaUowQf8UB4jMjFlAxx1QzQcsD3KFNAVX/CAWzFPtllTyTplrA4JrQ7sCH3qmYw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/resources@1.17.0': + resolution: {integrity: sha512-+u0ciVnj8lhuL/qGRBPeVYvk7fL+H/vOddfvmOeJaA1KC+5/3UED1c9KoZQlRsNT5Kw1FaK8LkY2NVLYfOVZQw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/resources@1.25.1': + resolution: {integrity: sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.41.2': + resolution: {integrity: sha512-smqKIw0tTW15waj7BAPHFomii5c3aHnSE4LQYTszGoK5P9nZs8tEAIpu15UBxi3aG31ZfsLmm4EUQkjckdlFrw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.5.0' + '@opentelemetry/api-logs': '>=0.39.1' + + '@opentelemetry/sdk-logs@0.43.0': + resolution: {integrity: sha512-JyJ2BBRKm37Mc4cSEhFmsMl5ASQn1dkGhEWzAAMSlhPtLRTv5PfvJwhR+Mboaic/eDLAlciwsgijq8IFlf6IgQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.7.0' + '@opentelemetry/api-logs': '>=0.39.1' + + '@opentelemetry/sdk-metrics@1.15.2': + resolution: {integrity: sha512-9aIlcX8GnhcsAHW/Wl8bzk4ZnWTpNlLtud+fxUfBtFATu6OZ6TrGrF4JkT9EVrnoxwtPIDtjHdEsSjOqisY/iA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.5.0' + + '@opentelemetry/sdk-metrics@1.17.0': + resolution: {integrity: sha512-HlWM27yGmYuwCoVRe3yg2PqKnIsq0kEF0HQgvkeDWz2NYkq9fFaSspR6kvjxUTbghAlZrabiqbgyKoYpYaXS3w==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.7.0' + + '@opentelemetry/sdk-metrics@1.25.1': + resolution: {integrity: sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@1.15.2': + resolution: {integrity: sha512-BEaxGZbWtvnSPchV98qqqqa96AOcb41pjgvhfzDij10tkBhIu9m0Jd6tZ1tJB5ZHfHbTffqYVYE0AOGobec/EQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.5.0' + + '@opentelemetry/sdk-trace-base@1.17.0': + resolution: {integrity: sha512-2T5HA1/1iE36Q9eg6D4zYlC4Y4GcycI1J6NsHPKZY9oWfAxWsoYnRlkPfUqyY5XVtocCo/xHpnJvGNHwzT70oQ==} + engines: {node: '>=14'} + 
peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.7.0' + + '@opentelemetry/sdk-trace-base@1.25.1': + resolution: {integrity: sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.15.2': + resolution: {integrity: sha512-CjbOKwk2s+3xPIMcd5UNYQzsf+v94RczbdNix9/kQh38WiQkM90sUOi3if8eyHFgiBjBjhwXrA7W3ydiSQP9mw==} + engines: {node: '>=14'} + + '@opentelemetry/semantic-conventions@1.17.0': + resolution: {integrity: sha512-+fguCd2d8d2qruk0H0DsCEy2CTK3t0Tugg7MhZ/UQMvmewbZLNnJ6heSYyzIZWG5IPfAXzoj4f4F/qpM7l4VBA==} + engines: {node: '>=14'} + + '@opentelemetry/semantic-conventions@1.25.1': + resolution: {integrity: sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==} + engines: {node: '>=14'} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@playwright/browser-chromium@1.45.3': + resolution: {integrity: sha512-UVPW8HveE8SghaahoMy8CfG0QdJ2mO0BZLOcPT8nlQh7Z97Gkv4e3Ad69D1oCqM3m3zYkDPAiGB+hOASNS0d/g==} + engines: {node: '>=18'} + + '@playwright/test@1.45.3': + resolution: {integrity: sha512-UKF4XsBfy+u3MFWEH44hva1Q8Da28G6RFtR2+5saw+jgAFQV5yYnB1fu68Mz7fO+5GJF3wgwAIs0UelU8TxFrA==} + engines: {node: '>=18'} + hasBin: true + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@sideway/address@4.1.5': + resolution: {integrity: sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==} + + '@sideway/formula@3.0.1': + resolution: {integrity: sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==} + + '@sideway/pinpoint@2.0.0': + resolution: {integrity: sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==} + + '@sigstore/bundle@2.3.2': + resolution: {integrity: 
sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/core@1.1.0': + resolution: {integrity: sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/protobuf-specs@0.3.2': + resolution: {integrity: sha512-c6B0ehIWxMI8wiS/bj6rHMPqeFvngFV7cDU/MY+B16P9Z3Mp9k8L93eYZ7BYzSickzuqAQqAq0V956b3Ju6mLw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/sign@2.3.2': + resolution: {integrity: sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/tuf@2.3.4': + resolution: {integrity: sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/verify@1.2.1': + resolution: {integrity: sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==} + engines: {node: ^16.14.0 || >=18.0.0} + '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + '@sinonjs/commons@3.0.1': resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} '@sinonjs/fake-timers@10.3.0': resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==} + '@smithy/abort-controller@3.1.1': + resolution: {integrity: sha512-MBJBiidoe+0cTFhyxT8g+9g7CeVccLM0IOKKUMCNQ1CNMJ/eIfoo0RTfVrXOONEI1UCN1W+zkiHSbzUNE9dZtQ==} + engines: {node: '>=16.0.0'} + + '@smithy/config-resolver@3.0.5': + resolution: {integrity: sha512-SkW5LxfkSI1bUC74OtfBbdz+grQXYiPYolyu8VfpLIjEoN/sHVBlLeGXMQ1vX4ejkgfv6sxVbQJ32yF2cl1veA==} + engines: {node: '>=16.0.0'} + + '@smithy/core@2.3.2': + resolution: {integrity: sha512-in5wwt6chDBcUv1Lw1+QzZxN9fBffi+qOixfb65yK4sDuKG7zAUO9HAFqmVzsZM3N+3tTyvZjtnDXePpvp007Q==} + engines: {node: '>=16.0.0'} + + '@smithy/credential-provider-imds@3.2.0': + resolution: {integrity: sha512-0SCIzgd8LYZ9EJxUjLXBmEKSZR/P/w6l7Rz/pab9culE/RWuqelAKGJvn5qUOl8BgX8Yj5HWM50A5hiB/RzsgA==} + engines: {node: '>=16.0.0'} + + '@smithy/fetch-http-handler@3.2.4': + resolution: {integrity: sha512-kBprh5Gs5h7ug4nBWZi1FZthdqSM+T7zMmsZxx0IBvWUn7dK3diz2SHn7Bs4dQGFDk8plDv375gzenDoNwrXjg==} + + '@smithy/hash-node@3.0.3': + resolution: {integrity: sha512-2ctBXpPMG+B3BtWSGNnKELJ7SH9e4TNefJS0cd2eSkOOROeBnnVBnAy9LtJ8tY4vUEoe55N4CNPxzbWvR39iBw==} + engines: {node: '>=16.0.0'} + + '@smithy/invalid-dependency@3.0.3': + resolution: {integrity: sha512-ID1eL/zpDULmHJbflb864k72/SNOZCADRc9i7Exq3RUNJw6raWUSlFEQ+3PX3EYs++bTxZB2dE9mEHTQLv61tw==} + + '@smithy/is-array-buffer@2.2.0': + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + + '@smithy/is-array-buffer@3.0.0': + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-compression@3.0.7': + resolution: {integrity: sha512-ide8RSj0HWHq8uGryx1PuhI/0p+xgrrG+atDBgmv1ScIVIBrH7hqk2cfXyZ3+zQYeD2z95iDn75U1BHwlSwhag==} + 
engines: {node: '>=16.0.0'} + + '@smithy/middleware-content-length@3.0.5': + resolution: {integrity: sha512-ILEzC2eyxx6ncej3zZSwMpB5RJ0zuqH7eMptxC4KN3f+v9bqT8ohssKbhNR78k/2tWW+KS5Spw+tbPF4Ejyqvw==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-endpoint@3.1.0': + resolution: {integrity: sha512-5y5aiKCEwg9TDPB4yFE7H6tYvGFf1OJHNczeY10/EFF8Ir8jZbNntQJxMWNfeQjC1mxPsaQ6mR9cvQbf+0YeMw==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-retry@3.0.14': + resolution: {integrity: sha512-7ZaWZJOjUxa5hgmuMspyt8v/zVsh0GXYuF7OvCmdcbVa/xbnKQoYC+uYKunAqRGTkxjOyuOCw9rmFUFOqqC0eQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-serde@3.0.3': + resolution: {integrity: sha512-puUbyJQBcg9eSErFXjKNiGILJGtiqmuuNKEYNYfUD57fUl4i9+mfmThtQhvFXU0hCVG0iEJhvQUipUf+/SsFdA==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-stack@3.0.3': + resolution: {integrity: sha512-r4klY9nFudB0r9UdSMaGSyjyQK5adUyPnQN/ZM6M75phTxOdnc/AhpvGD1fQUvgmqjQEBGCwpnPbDm8pH5PapA==} + engines: {node: '>=16.0.0'} + + '@smithy/node-config-provider@3.1.4': + resolution: {integrity: sha512-YvnElQy8HR4vDcAjoy7Xkx9YT8xZP4cBXcbJSgm/kxmiQu08DwUwj8rkGnyoJTpfl/3xYHH+d8zE+eHqoDCSdQ==} + engines: {node: '>=16.0.0'} + + '@smithy/node-http-handler@3.1.4': + resolution: {integrity: sha512-+UmxgixgOr/yLsUxcEKGH0fMNVteJFGkmRltYFHnBMlogyFdpzn2CwqWmxOrfJELhV34v0WSlaqG1UtE1uXlJg==} + engines: {node: '>=16.0.0'} + + '@smithy/property-provider@3.1.3': + resolution: {integrity: sha512-zahyOVR9Q4PEoguJ/NrFP4O7SMAfYO1HLhB18M+q+Z4KFd4V2obiMnlVoUFzFLSPeVt1POyNWneHHrZaTMoc/g==} + engines: {node: '>=16.0.0'} + + '@smithy/protocol-http@4.1.0': + resolution: {integrity: sha512-dPVoHYQ2wcHooGXg3LQisa1hH0e4y0pAddPMeeUPipI1tEOqL6A4N0/G7abeq+K8wrwSgjk4C0wnD1XZpJm5aA==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-builder@3.0.3': + resolution: {integrity: sha512-vyWckeUeesFKzCDaRwWLUA1Xym9McaA6XpFfAK5qI9DKJ4M33ooQGqvM4J+LalH4u/Dq9nFiC8U6Qn1qi0+9zw==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-parser@3.0.3': + resolution: {integrity: sha512-zahM1lQv2YjmznnfQsWbYojFe55l0SLG/988brlLv1i8z3dubloLF+75ATRsqPBboUXsW6I9CPGE5rQgLfY0vQ==} + engines: {node: '>=16.0.0'} + + '@smithy/service-error-classification@3.0.3': + resolution: {integrity: sha512-Jn39sSl8cim/VlkLsUhRFq/dKDnRUFlfRkvhOJaUbLBXUsLRLNf9WaxDv/z9BjuQ3A6k/qE8af1lsqcwm7+DaQ==} + engines: {node: '>=16.0.0'} + + '@smithy/shared-ini-file-loader@3.1.4': + resolution: {integrity: sha512-qMxS4hBGB8FY2GQqshcRUy1K6k8aBWP5vwm8qKkCT3A9K2dawUwOIJfqh9Yste/Bl0J2lzosVyrXDj68kLcHXQ==} + engines: {node: '>=16.0.0'} + + '@smithy/signature-v4@4.1.0': + resolution: {integrity: sha512-aRryp2XNZeRcOtuJoxjydO6QTaVhxx/vjaR+gx7ZjaFgrgPRyZ3HCTbfwqYj6ZWEBHkCSUfcaymKPURaByukag==} + engines: {node: '>=16.0.0'} + + '@smithy/smithy-client@3.1.12': + resolution: {integrity: sha512-wtm8JtsycthkHy1YA4zjIh2thJgIQ9vGkoR639DBx5lLlLNU0v4GARpQZkr2WjXue74nZ7MiTSWfVrLkyD8RkA==} + engines: {node: '>=16.0.0'} + + '@smithy/types@3.3.0': + resolution: {integrity: sha512-IxvBBCTFDHbVoK7zIxqA1ZOdc4QfM5HM7rGleCuHi7L1wnKv5Pn69xXJQ9hgxH60ZVygH9/JG0jRgtUncE3QUA==} + engines: {node: '>=16.0.0'} + + '@smithy/url-parser@3.0.3': + resolution: {integrity: sha512-pw3VtZtX2rg+s6HMs6/+u9+hu6oY6U7IohGhVNnjbgKy86wcIsSZwgHrFR+t67Uyxvp4Xz3p3kGXXIpTNisq8A==} + + '@smithy/util-base64@3.0.0': + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-body-length-browser@3.0.0': + resolution: {integrity: 
sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + + '@smithy/util-body-length-node@3.0.0': + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-buffer-from@2.2.0': + resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + + '@smithy/util-buffer-from@3.0.0': + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-config-provider@3.0.0': + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-defaults-mode-browser@3.0.14': + resolution: {integrity: sha512-0iwTgKKmAIf+vFLV8fji21Jb2px11ktKVxbX6LIDPAUJyWQqGqBVfwba7xwa1f2FZUoolYQgLvxQEpJycXuQ5w==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-defaults-mode-node@3.0.14': + resolution: {integrity: sha512-e9uQarJKfXApkTMMruIdxHprhcXivH1flYCe8JRDTzkkLx8dA3V5J8GZlST9yfDiRWkJpZJlUXGN9Rc9Ade3OQ==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-endpoints@2.0.5': + resolution: {integrity: sha512-ReQP0BWihIE68OAblC/WQmDD40Gx+QY1Ez8mTdFMXpmjfxSyz2fVQu3A4zXRfQU9sZXtewk3GmhfOHswvX+eNg==} + engines: {node: '>=16.0.0'} + + '@smithy/util-hex-encoding@3.0.0': + resolution: {integrity: sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-middleware@3.0.3': + resolution: {integrity: sha512-l+StyYYK/eO3DlVPbU+4Bi06Jjal+PFLSMmlWM1BEwyLxZ3aKkf1ROnoIakfaA7mC6uw3ny7JBkau4Yc+5zfWw==} + engines: {node: '>=16.0.0'} + + '@smithy/util-retry@3.0.3': + resolution: {integrity: sha512-AFw+hjpbtVApzpNDhbjNG5NA3kyoMs7vx0gsgmlJF4s+yz1Zlepde7J58zpIRIsdjc+emhpAITxA88qLkPF26w==} + engines: {node: '>=16.0.0'} + + '@smithy/util-stream@3.1.3': + resolution: {integrity: sha512-FIv/bRhIlAxC0U7xM1BCnF2aDRPq0UaelqBHkM2lsCp26mcBbgI0tCVTv+jGdsQLUmAMybua/bjDsSu8RQHbmw==} + engines: {node: '>=16.0.0'} + + '@smithy/util-uri-escape@3.0.0': + resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + + '@smithy/util-utf8@2.3.0': + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + + '@smithy/util-utf8@3.0.0': + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-waiter@3.1.2': + resolution: {integrity: sha512-4pP0EV3iTsexDx+8PPGAKCQpd/6hsQBaQhqWzU4hqKPHN5epPsxKbvUTIiYIHTxaKt6/kEaqPBpu/ufvfbrRzw==} + engines: {node: '>=16.0.0'} + + '@socket.io/component-emitter@3.1.2': + resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==} + '@supabase/auth-js@2.64.2': resolution: {integrity: sha512-s+lkHEdGiczDrzXJ1YWt2y3bxRi+qIUnXcgkpLSrId7yjBeaXBFygNjTaoZLG02KNcYwbuZ9qkEIqmj2hF7svw==} @@ -348,6 +1142,181 @@ packages: '@supabase/supabase-js@2.44.2': resolution: {integrity: sha512-fouCwL1OxqftOwLNgdDUPlNnFuCnt30nS4kLcnTpe6NYKn1PmjxRRBFmKscgHs6FjWyU+32ZG4uBJ29+/BWiDw==} + '@szmarczak/http-timer@4.0.6': + resolution: {integrity: 
sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==} + engines: {node: '>=10'} + + '@tapjs/after-each@2.0.8': + resolution: {integrity: sha512-btkpQ/BhmRyG50rezduxEZb3pMJblECvTQa41+U2ln2te1prDTlllHlpq4lOjceUksl8KFF1avDqcBqIqPzneQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/after@1.1.31': + resolution: {integrity: sha512-531NkYOls9PvqfnLsEDRzIWwjynoFRbUVq7pTYuA3PRIw4Ka7jA9uUjILeUurcWjaHrQNzUua0jj/Yu94f6YYw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/asserts@2.0.8': + resolution: {integrity: sha512-57VrI0p2kAqfgHHUwowDvd31eTfDHw3HO4FSSVUCvngPGWa96R6eH9gXa9fNig4qIp4Dup+nI7gJlJfU0R80SA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/before-each@2.0.8': + resolution: {integrity: sha512-Xjgk8/fuP7iFa5CYjFDl05p5PZGRe//VyHJNuYNzWpF1K9PNMtVdlmwplfpFmbrNrw/bIPq7R6LuiPmTBgzuOw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/before@2.0.8': + resolution: {integrity: sha512-22ZdGSn/zOKf8J8cb3yfw5R4I/ozdHEDKL8lBWon/zsxxMMvaRTgOtFXEjb4RE+5SDrqQ4NM7ZRYPGhE7T97dw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/chdir@1.1.4': + resolution: {integrity: sha512-axXkT5kWp2/X8l6inKyrqzUhqgvsgrWI8/0xLAdmirpFZ8H6gFxrl763Ozdm27EAmkLnnnWgFITPqUQCuB/tMA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/config@3.1.6': + resolution: {integrity: sha512-5gkDMSLXL5798bbCdX4RdLpB4OUQeu9TXftzKmL1+1T2xbcd4q7zfDnCfOB9zTk50x2f04+4h6Q7Z1NcSKIspg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + '@tapjs/test': 2.2.4 + + '@tapjs/core@2.1.6': + resolution: {integrity: sha512-NYMp0bl52DxXfcLmivMKvOIE14aaB9qJjdHeUbs6GZ9yxgD5w0yeiOT+gWEL+1PzZgGWRxSFEpghID1YfXAc4w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + '@tapjs/error-serdes@2.0.1': + resolution: {integrity: sha512-P+M4rtcfkDsUveKKmoRNF+07xpbPnRY5KrstIUOnyn483clQ7BJhsnWr162yYNCsyOj4zEfZmAJI1f8Bi7h/ZA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + '@tapjs/filter@2.0.8': + resolution: {integrity: sha512-/ps6nOS3CTh1WLfCjJnU7tS4PH4KFgEasFSVPCIFN+BasyoqDapzj4JKIlzQvppZOGTQadKH3wUakafZl7uz8w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/fixture@2.0.8': + resolution: {integrity: sha512-LJnjeAMSozPFXzu+wQw2HJsjA9djHbTcyeMnsgiRL/Q8ffcLqAawV3SN6XKdDLdWYUg3e1fXhHspnbsouZj+xA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/intercept@2.0.8': + resolution: {integrity: sha512-OF2Q35jtZ20bwV4hRNoca7vqIrzPFR3JR25G2rGru+fgPmq4heN0RLoh0d1O34AbrtXqra2lXkacMB/DPgb01A==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/mock@2.1.6': + resolution: {integrity: sha512-bNXKrjg/r+i/gfKij5Oo/5Md2DvGNHPSRCHQmjz3VQjpyxqK7S1FGcR0kyqJ8Nof6Wc8yIhpNOCuibj19200IQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/node-serialize@2.0.8': + resolution: {integrity: sha512-92oqhkmIz5wr0yRs1CPQfim5JSwHPSmoDWnQmJlYUZsY1OYgYouQm3ifnPkqK/9hJpVYzlZEQmefxehxbs2WNQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + 
'@tapjs/core': 2.1.6 + + '@tapjs/processinfo@3.1.8': + resolution: {integrity: sha512-FIriEB+qqArPhmVYc1PZwRHD99myRdl7C9Oe/uts04Q2LOxQ5MEmqP9XOP8vVYzpDOYwmL8OmL6eOYt9eZlQKQ==} + engines: {node: '>=16.17'} + + '@tapjs/reporter@2.0.8': + resolution: {integrity: sha512-tZn5ZHIrFwjbi59djtdXHBwgSIZSBXdJpz2i9CZ9HEC1nFhWtIr2Jczvrz4ScfixUgA0GNFirz+q+9iA4IFMvw==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/run@2.1.7': + resolution: {integrity: sha512-Hk41E68f1x4eLBm6Rrxx4ARzZzrjwaLbKThb16+f3bGYiajmqAvBdeyNEoQpEWmW+Sv2HSlueOk2SS2P4fyetg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/snapshot@2.0.8': + resolution: {integrity: sha512-L0vtqWKkgnQt/XNQkvHOme9Np7ffteCNf1P0F9mz2YiJion4er1nv6pZuJoKVxXFQsbNd2k+LGyx0Iw+bIzwFg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/spawn@2.0.8': + resolution: {integrity: sha512-vCYwynIYJNijY87uHFANe+gCu9rdGoe4GOBmghl6kwDy7eISmcN/FW5TlmrjePMNhTvrDMeYqOIAzqh3WRYmPA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/stack@2.0.1': + resolution: {integrity: sha512-3rKbZkRkLeJl9ilV/6b80YfI4C4+OYf7iEz5/d0MIVhmVvxv0ttIy5JnZutAc4Gy9eRp5Ne5UTAIFOVY5k36cg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + '@tapjs/stdin@2.0.8': + resolution: {integrity: sha512-tW/exLXuDqjtH2wjptiPHXBahkdSyoppxDY56l9MG4tiz66dMN6NTCZFvQxp7+3t+lsQKqJp/74z8T/ayp+vZA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/test@2.2.4': + resolution: {integrity: sha512-QIgq2BhMpwO9SN8I0qlwZYXAllO4xWCfJ0MgAGhc+J7p69B5p9dDNPmyOreHeXWMmk6VlNj3oWveoXb5Zn9xZQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/typescript@1.4.13': + resolution: {integrity: sha512-MNs7zlhM6G3pNUIjkKXDxgNCwCGZt2bUCGtVunSTDVIrKiUlHAl4QSjQ1oTjumHlCi9gFIWiwFAvpHekzFti0w==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tapjs/worker@2.0.8': + resolution: {integrity: sha512-AySf2kV6OHvwgD3DrLdT2az2g4hRdoRtKsFCLdZo3jOoKte+ft/IQJEnOW7CPT0RYUskS3elv6eabYgSyTH4tg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + peerDependencies: + '@tapjs/core': 2.1.6 + + '@tsconfig/node10@1.0.11': + resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} + + '@tsconfig/node12@1.0.11': + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + + '@tsconfig/node14@1.0.3': + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + + '@tsconfig/node14@14.1.2': + resolution: {integrity: sha512-1vncsbfCZ3TBLPxesRYz02Rn7SNJfbLoDVkcZ7F/ixOV6nwxwgdhD1mdPcc5YQ413qBJ8CvMxXMFfJ7oawjo7Q==} + + '@tsconfig/node16@1.0.4': + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + + '@tsconfig/node16@16.1.3': + resolution: {integrity: sha512-9nTOUBn+EMKO6rtSZJk+DcqsfgtlERGT9XPJ5PRj/HNENPCBY1yu/JEj5wT6GLtbCLBO2k46SeXDaY0pjMqypw==} + + '@tsconfig/node18@18.2.4': + resolution: {integrity: sha512-5xxU8vVs9/FNcvm3gE07fPbn9tl6tqGGWA9tSlwsUEkBxtRnTsNmwrV8gasZ9F/EobaSv9+nu8AxUKccw77JpQ==} + + '@tsconfig/node20@20.1.4': + resolution: 
{integrity: sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==} + + '@tufjs/canonical-json@2.0.0': + resolution: {integrity: sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@tufjs/models@2.0.1': + resolution: {integrity: sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==} + engines: {node: ^16.14.0 || >=18.0.0} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -360,12 +1329,21 @@ packages: '@types/babel__traverse@7.20.5': resolution: {integrity: sha512-WXCyOcRtH37HAUkpXhUduaxdm82b4GSlyTqajXviN4EfiuPgNYR109xMCKvpl6zPIpua0DGlMEDCq+g8EdoheQ==} + '@types/cacheable-request@6.0.3': + resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} + + '@types/cli-progress@3.11.6': + resolution: {integrity: sha512-cE3+jb9WRlu+uOSAugewNpITJDt1VF8dHOopPO4IABFc3SXYL5WE/+PTz/FCdZRRfIujiWW3n3aMbv1eIGVRWA==} + '@types/cookiejar@2.1.5': resolution: {integrity: sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q==} '@types/graceful-fs@4.1.9': resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + '@types/http-cache-semantics@4.0.4': + resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} + '@types/istanbul-lib-coverage@2.0.6': resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} @@ -378,6 +1356,9 @@ packages: '@types/jest@29.5.12': resolution: {integrity: sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==} + '@types/keyv@3.1.4': + resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==} + '@types/methods@1.1.4': resolution: {integrity: sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ==} @@ -396,6 +1377,9 @@ packages: '@types/phoenix@1.6.5': resolution: {integrity: sha512-xegpDuR+z0UqG9fwHqNoy3rI7JDlvaPh2TY47Fl80oq6g+hXT+c/LEuE43X48clZ6lOfANl5WrPur9fYO1RJ/w==} + '@types/responselike@1.0.3': + resolution: {integrity: sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==} + '@types/stack-utils@2.0.3': resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} @@ -414,22 +1398,81 @@ packages: '@types/yargs@17.0.32': resolution: {integrity: sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==} + '@typescript-eslint/types@5.62.0': + resolution: {integrity: sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@typescript-eslint/typescript-estree@5.62.0': + resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/visitor-keys@5.62.0': + resolution: {integrity: 
sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + abbrev@2.0.0: + resolution: {integrity: sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn-walk@8.3.3: + resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} + engines: {node: '>=0.4.0'} + + acorn@8.12.1: + resolution: {integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + + agent-base@7.1.1: + resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} + engines: {node: '>= 14'} + agentkeepalive@4.5.0: resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} + aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + + amdefine@1.0.1: + resolution: {integrity: sha512-S2Hw0TtNkMJhIabBwIojKL9YHO5T0n5eNqWJ7Lrlel/zDbftQpxpapi8tZs3X1HWa+u+QeydGmzzNU0m09+Rcg==} + engines: {node: '>=0.4.2'} + ansi-escapes@4.3.2: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} + ansi-escapes@6.2.1: + resolution: {integrity: sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==} + engines: {node: '>=14.16'} + ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} + ansi-styles@3.2.1: resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} @@ -442,19 +1485,120 @@ packages: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + ansicolors@0.3.2: + resolution: {integrity: sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==} + anymatch@3.1.3: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} + app-module-path@2.2.0: + resolution: {integrity: 
sha512-gkco+qxENJV+8vFcDiiFhuoSvRXb2a/QPqpSoWhVz829VNJfOTnELbBmPmNKFxf3xdNnw4DWCkzkDaavcX/1YQ==} + + archiver-utils@2.1.0: + resolution: {integrity: sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==} + engines: {node: '>= 6'} + + archiver-utils@3.0.4: + resolution: {integrity: sha512-KVgf4XQVrTjhyWmx6cte4RxonPLR9onExufI1jhvw/MQ4BB6IsZD5gT8Lq+u/+pRkWna/6JoHpiQioaqFP5Rzw==} + engines: {node: '>= 10'} + + archiver@5.3.2: + resolution: {integrity: sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==} + engines: {node: '>= 10'} + + arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + arrivals@2.1.2: + resolution: {integrity: sha512-g3+rxhxUen2H4+PPBOz6U6pkQ4esBuQPna1rPskgK1jamBdDZeoppyB2vPUM/l0ccunwRrq4r2rKgCvc2FnrFA==} + + artillery-engine-playwright@1.16.0: + resolution: {integrity: sha512-90Gka/neaKABygcWANL/wOrI3U75Xll4yAZmBywQiTONorGL3SIizEEujGXosHLUeOgzc+3OEldP5qXfCynMOg==} + + artillery-plugin-apdex@1.10.0: + resolution: {integrity: sha512-TabM/LXhp5n3AKiCXQHl3ivwCuh7QfdV5vjYpT8di32Rd42f9AahFiOIje4aInW9u5S8qNsB78UU3ov084GxwA==} + + artillery-plugin-ensure@1.13.0: + resolution: {integrity: sha512-/FwOj4a2npaUkNsB+dtHGa5euRqi1ly0mvcqz2UawNia+5SQXVJauL0ue84uQrU0O8ercH/gzsb7cG2/RKYkwg==} + + artillery-plugin-expect@2.13.0: + resolution: {integrity: sha512-j7beHovPaR9b4tKxn6gq6QXXK+XXtZ2B6DzX3PERqPcZPA07zSPrTJfzmtbwmnAGvQZYLK5jucQ6H+ZzyimiQg==} + engines: {node: '>= 14.17.6'} + + artillery-plugin-fake-data@1.10.0: + resolution: {integrity: sha512-EQeeiIGJfxpXszn1zH91EyNprblpkME/HuHYloILExTc6My9+tcY5fezd1SEBbQ+jJ4qKB5KJyqQ6RS6HE+oBQ==} + + artillery-plugin-metrics-by-endpoint@1.13.0: + resolution: {integrity: sha512-1zKp+kIZusPDLIcYE9Yheua5RYekAMNkJr/fQ2odQaeJdSkWyS/gURvroORhYAv41LKRfAvYazW668uUY6WkKA==} + + artillery-plugin-publish-metrics@2.24.0: + resolution: {integrity: sha512-7a6vykigjZ5zdk41ma8supGmownU31SdQRD9hxfpKv8gLIOAlTTD25OxnjGmddF4JArhztSAqrPb+J8s/7xXRw==} + + artillery-plugin-slack@1.8.0: + resolution: {integrity: sha512-BpZZonGQRBZo1oXw0XNx7itoGKlZDClE+SzNt3SDTTFcQuvdPD6FD05Y9hDfSfG3zdEuuc9joAtCuKMmZALaeg==} + + artillery@2.0.19: + resolution: {integrity: sha512-NeD5+D7U5l8hZ3lHtUseFTwqxILN2qfl4XlQt4cH0PukA/wsOri7cR0Qg2925usCa5EkD240Dfh9r9wYvuHxlw==} + engines: {node: '>= 18.16.1'} + hasBin: true + asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + ast-module-types@5.0.0: + resolution: {integrity: sha512-JvqziE0Wc0rXQfma0HZC/aY7URXHFuZV84fJRtP8u+lhp0JYCNd5wJzVXP45t0PH0Mej3ynlzvdyITYIu0G4LQ==} + engines: {node: '>=14'} + + astral-regex@2.0.0: + resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} + engines: {node: '>=8'} + + async-hook-domain@4.0.1: + resolution: {integrity: sha512-bSktexGodAjfHWIrSrrqxqWzf1hWBZBpmPNZv+TYUMyWa2eoefFc6q6H1+KtdHYSz35lrhWdmXt/XK9wNEZvww==} + engines: {node: '>=16'} + + async-limiter@1.0.1: + resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} + + async@1.5.0: 
+ resolution: {integrity: sha512-m9nMwCtLtz29LszVaR0q/FqsJWkrxVoQL95p7JU0us7qUx4WEcySQgwvuneYSGVyvirl81gz7agflS3V1yW14g==} + + async@2.6.4: + resolution: {integrity: sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==} + + async@3.2.5: + resolution: {integrity: sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==} + asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + auto-bind@5.0.1: + resolution: {integrity: sha512-ooviqdwwgfIfNmDwo94wlshcdzfO64XV0Cg6oDsDYBJfITDz1EngD2z7DkbvCWn+XIMsIqW27sEVF6qcpJrRcg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + available-typed-arrays@1.0.7: + resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} + engines: {node: '>= 0.4'} + + aws-sdk@2.1674.0: + resolution: {integrity: sha512-VTijN8+pKrf4sfM2t+ISXjypJ+k3AiP6OMzyLoWJ7jfMBtBfWbQc1rN07OndNb0CZRBBukOHoBhYDPuyae+/1Q==} + engines: {node: '>= 10.0.0'} + + axios@0.27.2: + resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + babel-jest@29.7.0: resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -483,13 +1627,44 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + bignumber.js@9.1.2: + resolution: {integrity: sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + bindings@1.5.0: + resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + + bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + + boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + + bowser@2.11.0: + resolution: {integrity: sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==} + brace-expansion@1.1.11: resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + braces@3.0.2: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} + browser-or-node@1.3.0: + resolution: {integrity: sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==} + browserslist@4.23.0: resolution: {integrity: sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==} engines: {node: ^6 || ^7 || 
^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -502,9 +1677,38 @@ packages: bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + buffer-crc32@0.2.13: + resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} + + buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + buffer@4.9.2: + resolution: {integrity: sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==} + + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + + c8@9.1.0: + resolution: {integrity: sha512-mBWcT5iqNir1zIkzSPyI3NCR9EZCVI3WUD+AVO17MVWTSFNyUueXE82qTeampNtTr+ilN/5Ua3j24LgbCKjDVg==} + engines: {node: '>=14.14.0'} + hasBin: true + + cacache@18.0.4: + resolution: {integrity: sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + cacheable-lookup@5.0.4: + resolution: {integrity: sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==} + engines: {node: '>=10.6.0'} + + cacheable-request@7.0.4: + resolution: {integrity: sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==} + engines: {node: '>=8'} + call-bind@1.0.7: resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} engines: {node: '>= 0.4'} @@ -524,18 +1728,45 @@ packages: caniuse-lite@1.0.30001615: resolution: {integrity: sha512-1IpazM5G3r38meiae0bHRnPhz+CBQ3ZLqbQMtrg+AsTPKAXgW38JNsXkyZ+v8waCsDmPq87lmfun5Q2AGysNEQ==} + cardinal@2.1.1: + resolution: {integrity: sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} + hasBin: true + chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} + chalk@3.0.0: + resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} + engines: {node: '>=8'} + chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} + chalk@5.3.0: + resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + char-regex@1.0.2: resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} engines: {node: '>=10'} + cheerio-select@2.1.0: + resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} + + cheerio@1.0.0: + resolution: {integrity: sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==} + engines: {node: '>=18.17'} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + chownr@2.0.0: + resolution: {integrity: 
sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} + engines: {node: '>=10'} + ci-info@3.9.0: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} @@ -543,14 +1774,61 @@ packages: cjs-module-lexer@1.3.1: resolution: {integrity: sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==} + clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + + clean-stack@3.0.1: + resolution: {integrity: sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg==} + engines: {node: '>=10'} + + cli-boxes@3.0.0: + resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} + engines: {node: '>=10'} + + cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + + cli-cursor@4.0.0: + resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cli-progress@3.12.0: + resolution: {integrity: sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==} + engines: {node: '>=4'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + + cli-table3@0.6.5: + resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} + engines: {node: 10.* || >= 12.*} + + cli-truncate@3.1.0: + resolution: {integrity: sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} + clone-response@1.0.3: + resolution: {integrity: sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==} + + clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + code-excerpt@4.0.0: + resolution: {integrity: sha512-xxodCmBen3iy2i0WtAK8FlFNrRzjUqjRsMfho58xT/wvZU1YTM3fCnRjcy1gJPMepaRlgm/0e6w8SpWHpn3/cA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + collect-v8-coverage@1.0.2: resolution: {integrity: sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==} @@ -571,27 +1849,86 @@ packages: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + component-emitter@1.3.1: resolution: {integrity: 
sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} + compress-commons@4.1.2: + resolution: {integrity: sha512-D3uMHtGc/fcO1Gt1/L7i1e33VOvD4A9hfQLP+6ewd+BvG/gQ84Yh4oftEhAdjSMgBgwGL+jsppT7JYNpo6MHHg==} + engines: {node: '>= 10'} + concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + convert-to-spaces@2.0.1: + resolution: {integrity: sha512-rcQ1bsQO9799wq24uE5AM2tAILy4gXGIK/njFWcVQkGNZ96edlpY+A7bjwvzjYvLDyzmG1MmMLZhpcsb+klNMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cookie-parser@1.4.6: + resolution: {integrity: sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA==} + engines: {node: '>= 0.8.0'} + + cookie-signature@1.0.6: + resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} + + cookie@0.4.1: + resolution: {integrity: sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==} + engines: {node: '>= 0.6'} + cookiejar@2.1.4: resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==} + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + + crc32-stream@4.0.3: + resolution: {integrity: sha512-NT7w2JVU7DFroFdYkeq8cywxrgjPHWkdX1wjpRQXPX5Asews3tA+Ght6lddQO5Mkumffp3X7GEqku3epj2toIw==} + engines: {node: '>= 10'} + create-jest@29.7.0: resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true + create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} + css-select@5.1.0: + resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} + + css-what@6.1.0: + resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} + engines: {node: '>= 6'} + + csv-parse@4.16.3: + resolution: {integrity: sha512-cO1I/zmz4w2dcKHVvpCr7JVRu8/FymG5OEpmvsZYlccYolPBLoVGKUHgNoc4ZGkFeFlWGEDmMyBM+TTqRdW/wg==} + + datadog-metrics@0.9.3: + resolution: {integrity: sha512-BVsBX2t+4yA3tHs7DnB5H01cHVNiGJ/bHA8y6JppJDyXG7s2DLm6JaozPGpgsgVGd42Is1CHRG/yMDQpt877Xg==} + + debug@3.1.0: + resolution: {integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -601,6 +1938,10 @@ packages: supports-color: optional: true + decompress-response@6.0.0: + resolution: {integrity: 
sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + dedent@1.5.3: resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==} peerDependencies: @@ -609,22 +1950,78 @@ packages: babel-plugin-macros: optional: true + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + deep-for-each@3.0.0: + resolution: {integrity: sha512-pPN+0f8jlnNP+z90qqOdxGghJU5XM6oBDhvAR+qdQzjCg5pk/7VPPvKK1GqoXEFkHza6ZS+Otzzvmr0g3VUaKw==} + deepmerge@4.3.1: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} + defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + + defer-to-connect@2.0.1: + resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} + engines: {node: '>=10'} + define-data-property@1.1.4: resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} engines: {node: '>= 0.4'} + define-lazy-prop@2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dependency-tree@10.0.9: + resolution: {integrity: sha512-dwc59FRIsht+HfnTVM0BCjJaEWxdq2YAvEDy4/Hn6CwS3CBWMtFnL3aZGAkQn3XCYxk/YcTDE4jX2Q7bFTwCjA==} + engines: {node: '>=14'} + hasBin: true + detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} engines: {node: '>=8'} + detective-amd@5.0.2: + resolution: {integrity: sha512-XFd/VEQ76HSpym80zxM68ieB77unNuoMwopU2TFT/ErUk5n4KvUTwW4beafAVUugrjV48l4BmmR0rh2MglBaiA==} + engines: {node: '>=14'} + hasBin: true + + detective-cjs@5.0.1: + resolution: {integrity: sha512-6nTvAZtpomyz/2pmEmGX1sXNjaqgMplhQkskq2MLrar0ZAIkHMrDhLXkRiK2mvbu9wSWr0V5/IfiTrZqAQMrmQ==} + engines: {node: '>=14'} + + detective-es6@4.0.1: + resolution: {integrity: sha512-k3Z5tB4LQ8UVHkuMrFOlvb3GgFWdJ9NqAa2YLUU/jTaWJIm+JJnEh4PsMc+6dfT223Y8ACKOaC0qcj7diIhBKw==} + engines: {node: '>=14'} + + detective-postcss@6.1.3: + resolution: {integrity: sha512-7BRVvE5pPEvk2ukUWNQ+H2XOq43xENWbH0LcdCE14mwgTBEAMoAx+Fc1rdp76SmyZ4Sp48HlV7VedUnP6GA1Tw==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + detective-sass@5.0.3: + resolution: {integrity: sha512-YsYT2WuA8YIafp2RVF5CEfGhhyIVdPzlwQgxSjK+TUm3JoHP+Tcorbk3SfG0cNZ7D7+cYWa0ZBcvOaR0O8+LlA==} + engines: {node: '>=14'} + + detective-scss@4.0.3: + resolution: {integrity: sha512-VYI6cHcD0fLokwqqPFFtDQhhSnlFWvU614J42eY6G0s8c+MBhi9QAWycLwIOGxlmD8I/XvGSOUV1kIDhJ70ZPg==} + engines: {node: '>=14'} + + detective-stylus@4.0.0: + resolution: {integrity: sha512-TfPotjhszKLgFBzBhTOxNHDsutIxx9GTWjrL5Wh7Qx/ydxKhwUrlSFeLIn+ZaHPF+h0siVBkAQSuy6CADyTxgQ==} + engines: {node: '>=14'} + + detective-typescript@11.2.0: + resolution: {integrity: sha512-ARFxjzizOhPqs1fYC/2NMC3N4jrQ6HvVflnXBTRqNEqJuXwyKLRr9CrJwkRcV/SnZt1sNXgsF6FPm0x57Tq0rw==} + engines: {node: ^14.14.0 || >=16.0.0} + dezalgo@1.0.4: resolution: {integrity: 
sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} @@ -632,10 +2029,53 @@ packages: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + + diff@5.2.0: + resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} + engines: {node: '>=0.3.1'} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + dogapi@2.8.4: + resolution: {integrity: sha512-065fsvu5dB0o4+ENtLjZILvXMClDNH/yA9H6L8nsdcNiz9l0Hzpn7aQaCOPYXxqyzq4CRPOdwkFXUjDOXfRGbg==} + hasBin: true + + dom-serializer@2.0.0: + resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@5.0.3: + resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + + domutils@3.1.0: + resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + dotenv@16.4.5: resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} + driftless@2.0.3: + resolution: {integrity: sha512-hSDKsQphnL4O0XLAiyWQ8EiM9suXH0Qd4gMtwF86b5wygGV8r95w0JcA38FOmx9N3LjFCIHLG2winLPNken4Tg==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + + ejs@3.1.10: + resolution: {integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==} + engines: {node: '>=0.10.0'} + hasBin: true + electron-to-chromium@1.4.754: resolution: {integrity: sha512-7Kr5jUdns5rL/M9wFFmMZAgFDuL2YOnanFH4OI4iFzUqyh3XOL7nAGbSlSMZdzKMIyyTpNSbqZsWG9odwLeKvA==} @@ -646,6 +2086,43 @@ packages: emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + encoding-sniffer@0.2.0: + resolution: {integrity: sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg==} + + encoding@0.1.13: + resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} + + end-of-stream@1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + + engine.io-client@6.5.4: + resolution: {integrity: sha512-GeZeeRjpD2qf49cZQ0Wvh/8NJNfeXkXXcoGh+F77oEAgo9gUHwT1fCRxSNU+YEEaysOJTnsFHmM5oAcPy4ntvQ==} + + engine.io-parser@5.2.3: + resolution: {integrity: 
sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==} + engines: {node: '>=10.0.0'} + + enhanced-resolve@5.17.1: + resolution: {integrity: sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==} + engines: {node: '>=10.13.0'} + + ensure-posix-path@1.1.1: + resolution: {integrity: sha512-VWU0/zXzVbeJNXvME/5EmLuEj2TauvoaTz6aFYK1Z92JCBlDlZ3Gu0tuGR42kpW1754ywTs+QB0g5TP0oj9Zaw==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + + err-code@2.0.3: + resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -657,6 +2134,11 @@ packages: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} + esbuild-wasm@0.19.12: + resolution: {integrity: sha512-Zmc4hk6FibJZBcTx5/8K/4jT3/oG1vkGTEeKJUQFCUQKimD6Q7+adp/bdVQyYJFolMKaXkQnVZdV4O5ZaTYmyQ==} + engines: {node: '>=12'} + hasBin: true + escalade@3.1.2: resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} engines: {node: '>=6'} @@ -669,15 +2151,58 @@ packages: resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} engines: {node: '>=8'} + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escodegen@2.1.0: + resolution: {integrity: sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==} + engines: {node: '>=6.0'} + hasBin: true + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} hasBin: true + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + event-target-shim@5.0.1: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} + eventemitter3@1.1.1: + resolution: {integrity: sha512-idmH3G0vJjQv2a5N74b+oXcOUKYBqSGJGN1eVV6ELGdUnesAO8RZsU74eaS3VfldRet8N9pFupxppBUKztrBdQ==} + + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + + 
events-to-array@2.0.3: + resolution: {integrity: sha512-f/qE2gImHRa4Cp2y1stEOSgw8wTFyUdVJX7G//bMwbaV9JqISFxg99NbmVQeP7YLnDUZ2un851jlaDrlpmGehQ==} + engines: {node: '>=12'} + + events@1.1.1: + resolution: {integrity: sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==} + engines: {node: '>=0.4.x'} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + execa@5.1.1: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} @@ -690,26 +2215,97 @@ packages: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + exponential-backoff@3.1.1: + resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} + + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + engines: {node: '>=8.6.0'} + fast-json-stable-stringify@2.1.0: resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + fast-levenshtein@3.0.0: + resolution: {integrity: sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ==} + fast-safe-stringify@2.1.1: resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + fast-xml-parser@4.4.1: + resolution: {integrity: sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==} + hasBin: true + + fastest-levenshtein@1.0.16: + resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} + engines: {node: '>= 4.9.1'} + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + fflate@0.8.1: + resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} + + file-uri-to-path@1.0.0: + resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + + filelist@1.0.4: + resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} + + filing-cabinet@4.2.0: + resolution: {integrity: sha512-YZ21ryzRcyqxpyKggdYSoXx//d3sCJzM3lsYoaeg/FyXdADGJrUl+BW1KIglaVLJN5BBcMtWylkygY8zBp2MrQ==} + engines: {node: '>=14'} + hasBin: true + fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} + filtrex@0.5.4: + resolution: {integrity: sha512-2phGAjWOYRf96Al6s+w/hMjObP1cRyQ95hoZApjeFO75DXN4Flh9uuUAtL3LI4fkryLa2QWdA8MArvt0GMU0pA==} + + filtrex@2.2.3: + resolution: {integrity: 
sha512-TL12R6SckvJdZLibXqyp4D//wXZNyCalVYGqaWwQk9zucq9dRxmrJV4oyuRq4PHFHCeV5ZdzncIc/Ybqv1Lr6Q==} + find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + follow-redirects@1.15.6: + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + for-each@0.3.3: + resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} + + foreground-child@3.3.0: + resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} + engines: {node: '>=14'} + form-data-encoder@1.7.2: resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + form-data@3.0.1: + resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} + engines: {node: '>= 6'} + form-data@4.0.0: resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} engines: {node: '>= 6'} @@ -721,6 +2317,24 @@ packages: formidable@3.5.1: resolution: {integrity: sha512-WJWKelbRHN41m5dumb0/k8TeAx7Id/y3a+Z7QfhxP/htI9Js5zYaEDtG8uMgG0vM0lOlqnmjE99/kfpOYi/0Og==} + fromentries@1.3.2: + resolution: {integrity: sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==} + + fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + + fs-extra@10.1.0: + resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} + engines: {node: '>=12'} + + fs-minipass@2.1.0: + resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} + + fs-minipass@3.0.3: + resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -737,10 +2351,17 @@ packages: function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + function-loop@4.0.0: + resolution: {integrity: sha512-f34iQBedYF3XcI93uewZZOnyscDragxgTK/eTvVB74k3fCD0ZorOi5BV9GS4M8rz/JoNi0Kl3qX5Y9MH3S/CLQ==} + gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + get-amd-module-type@5.0.1: + resolution: {integrity: sha512-jb65zDeHyDjFR1loOVk0HQGM5WNwoGB8aLWy3LKCieMKol0/ProHkhO2X1JxojuN10vbz1qNn09MJ7tNp7qMzw==} + engines: {node: '>=14'} + get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} @@ -749,14 +2370,29 @@ packages: resolution: {integrity: 
sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} engines: {node: '>= 0.4'} + get-own-enumerable-property-symbols@3.0.2: + resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} + get-package-type@0.1.0: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} + get-stream@5.2.0: + resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} + engines: {node: '>=8'} + get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} deprecated: Glob versions prior to v9 are no longer supported @@ -765,9 +2401,25 @@ packages: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} engines: {node: '>=4'} + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + gonzales-pe@4.3.0: + resolution: {integrity: sha512-otgSPpUmdWJ43VXyiNgEYE4luzHCL2pz4wQ0OnDluC6Eg4Ko3Vexy/SrSynglw/eR+OhkzmqFCZa/OFa/RgAOQ==} + engines: {node: '>=0.6.0'} + hasBin: true + + google-protobuf@3.6.1: + resolution: {integrity: sha512-SJYemeX5GjDLPnadcmCNQePQHCS4Hl5fOcI/JawqDIYFhCmrtYAjcx/oTQx/Wi8UuCuZQhfvftbmPePPAYHFtA==} + gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} + got@11.8.6: + resolution: {integrity: sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==} + engines: {node: '>=10.19.0'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -790,17 +2442,61 @@ packages: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} + hex2dec@1.0.1: + resolution: {integrity: sha512-F9QO0+ZI8r1VZudxw21bD/U5pb2Y9LZY3TsnVqCPaijvw5mIhH5jsH29acLPijl5fECfD8FetJtgX8GN5YPM9Q==} + hexoid@1.0.0: resolution: {integrity: sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==} engines: {node: '>=8'} + hosted-git-info@7.0.2: + resolution: {integrity: sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==} + engines: {node: ^16.14.0 || >=18.0.0} + + hot-shots@6.8.7: + resolution: {integrity: 
sha512-XH8iezBSZgVw2jegu96pUfF1Zv0VZ/iXjb7L5yE3F7mn7/bdhf4qeniXjO0wQWeefe433rhOsazNKLxM+XMI9w==} + engines: {node: '>=6.0.0'} + + hpagent@0.1.2: + resolution: {integrity: sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==} + html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + htmlparser2@9.1.0: + resolution: {integrity: sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==} + + http-cache-semantics@4.1.1: + resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} + + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + + http2-wrapper@1.0.3: + resolution: {integrity: sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==} + engines: {node: '>=10.19.0'} + + https-proxy-agent@5.0.0: + resolution: {integrity: sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==} + engines: {node: '>= 6'} + + https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + + https-proxy-agent@7.0.5: + resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==} + engines: {node: '>= 14'} + human-signals@2.1.0: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} @@ -808,6 +2504,28 @@ packages: humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + hyperlinker@1.0.0: + resolution: {integrity: sha512-Ty8UblRWFEcfSuIaajM34LdPXIhbs1ajEX/BBPv24J+enSVaEVY63xQ6lTO9VRYS5LAoghIG0IDJ+p+IPzKUQQ==} + engines: {node: '>=4'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + ieee754@1.1.13: + resolution: {integrity: sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + ignore-walk@6.0.5: + resolution: {integrity: sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + import-local@3.1.0: resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} engines: {node: '>=8'} @@ -817,6 +2535,14 @@ packages: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} engines: {node: '>=0.8.19'} + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + + indent-string@5.0.0: + resolution: {integrity: 
sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==} + engines: {node: '>=12'} + inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. @@ -824,31 +2550,150 @@ packages: inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + ini@4.1.3: + resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ink@4.4.1: + resolution: {integrity: sha512-rXckvqPBB0Krifk5rn/5LvQGmyXwCUpBfmTwbkQNBY9JY8RSl3b8OftBNEYxg4+SWUhEKcPifgope28uL9inlA==} + engines: {node: '>=14.16'} + peerDependencies: + '@types/react': '>=18.0.0' + react: '>=18.0.0' + react-devtools-core: ^4.19.1 + peerDependenciesMeta: + '@types/react': + optional: true + react-devtools-core: + optional: true + + ip-address@9.0.5: + resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} + engines: {node: '>= 12'} + + is-actual-promise@1.0.2: + resolution: {integrity: sha512-xsFiO1of0CLsQnPZ1iXHNTyR9YszOeWKYv+q6n8oSFW3ipooFJ1j1lbRMgiMCr+pp2gLruESI4zb5Ak6eK5OnQ==} + + is-arguments@1.1.1: + resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} + engines: {node: '>= 0.4'} + is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-callable@1.2.7: + resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} + engines: {node: '>= 0.4'} + + is-ci@3.0.1: + resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} + hasBin: true + is-core-module@2.13.1: resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==} + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} + is-fullwidth-code-point@4.0.0: + resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} + engines: {node: '>=12'} + is-generator-fn@2.1.0: resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} engines: {node: '>=6'} + is-generator-function@1.0.10: + 
resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} + engines: {node: '>= 0.4'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: {node: '>=8'} + + is-lambda@1.0.1: + resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} + + is-lower-case@2.0.2: + resolution: {integrity: sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==} + is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-obj@1.0.1: + resolution: {integrity: sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==} + engines: {node: '>=0.10.0'} + + is-plain-object@5.0.0: + resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} + engines: {node: '>=0.10.0'} + + is-regexp@1.0.0: + resolution: {integrity: sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==} + engines: {node: '>=0.10.0'} + + is-relative-path@1.0.2: + resolution: {integrity: sha512-i1h+y50g+0hRbBD+dbnInl3JlJ702aar58snAeX+MxBAPvzXGej7sYoPMhlnykabt0ZzCJNBEyzMlekuQZN7fA==} + is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} + is-typed-array@1.1.13: + resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==} + engines: {node: '>= 0.4'} + + is-upper-case@2.0.2: + resolution: {integrity: sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==} + + is-url-superb@4.0.0: + resolution: {integrity: sha512-GI+WjezhPPcbM+tqE9LnmsY5qqjwHzTvjJ36wxYX5ujNXefSUJ/T17r5bqDV8yLhcgB59KTPNOc9O9cmHTPWsA==} + engines: {node: '>=10'} + + is-url@1.2.4: + resolution: {integrity: sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + isexe@3.1.1: + resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==} + engines: {node: '>=16'} + + isomorphic-ws@4.0.1: + resolution: {integrity: sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==} + peerDependencies: + ws: '*' + istanbul-lib-coverage@3.2.2: resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} engines: {node: '>=8'} @@ -873,6 +2718,14 @@ packages: resolution: {integrity: sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==} engines: 
{node: '>=8'} + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jake@10.9.2: + resolution: {integrity: sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==} + engines: {node: '>=10'} + hasBin: true + jest-changed-files@29.7.0: resolution: {integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -1002,6 +2855,13 @@ packages: node-notifier: optional: true + jmespath@0.16.0: + resolution: {integrity: sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==} + engines: {node: '>= 0.6.0'} + + joi@17.13.3: + resolution: {integrity: sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -1009,27 +2869,78 @@ packages: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true + jsbn@1.1.0: + resolution: {integrity: sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==} + jsesc@2.5.2: resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} engines: {node: '>=4'} hasBin: true + json-bigint@1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + json-parse-even-better-errors@3.0.2: + resolution: {integrity: sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + json5@2.2.3: resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} hasBin: true + jsonfile@6.1.0: + resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + + jsonparse@1.3.1: + resolution: {integrity: sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==} + engines: {'0': node >= 0.2.0} + + jsonpath-plus@7.2.0: + resolution: {integrity: sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==} + engines: {node: '>=12.0.0'} + + jsonwebtoken@9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + + jwa@1.4.1: + resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + + jwa@2.0.0: + resolution: {integrity: sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==} + + jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + + jws@4.0.0: + resolution: {integrity: 
sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} + lazystream@1.0.1: + resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} + engines: {node: '>= 0.6.3'} + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} + lightstep-tracer@0.31.2: + resolution: {integrity: sha512-DRdyUrASPkr+hxyHQJ9ImPSIxpUCpqQvfgHwxoZ42G6iEJ2g0/2chCw39tuz60JUmLfTlVp1LFzLscII6YPRoA==} + engines: {node: '>=8.0.0'} + lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -1037,9 +2948,73 @@ packages: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + lodash.defaults@4.2.0: + resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + + lodash.difference@4.5.0: + resolution: {integrity: sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA==} + + lodash.flatten@4.4.0: + resolution: {integrity: sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==} + + lodash.includes@4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + + lodash.isboolean@3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + + lodash.isinteger@4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + + lodash.isnumber@3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + + lodash.isplainobject@4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + + lodash.isstring@4.0.1: + resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + lodash.memoize@4.1.2: resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lodash.once@4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + + lodash.union@4.6.0: + resolution: {integrity: sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==} + + lodash@4.17.21: + resolution: {integrity: 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + + log-symbols@3.0.0: + resolution: {integrity: sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==} + engines: {node: '>=8'} + + long@5.2.3: + resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} + + loose-envify@1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + + lowercase-keys@2.0.0: + resolution: {integrity: sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==} + engines: {node: '>=8'} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -1050,12 +3025,23 @@ packages: make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + make-fetch-happen@13.0.1: + resolution: {integrity: sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==} + engines: {node: ^16.14.0 || >=18.0.0} + makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + matcher-collection@1.1.2: + resolution: {integrity: sha512-YQ/teqaOIIfUHedRam08PB3NK7Mjct6BvzRnJmpGDm8uFXpNr1sbY4yuflI5JcEs6COpYA0FpRQhSDBf1tT95g==} + merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + methods@1.1.2: resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} engines: {node: '>= 0.6'} @@ -1081,18 +3067,131 @@ packages: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} + mimic-response@1.0.1: + resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==} + engines: {node: '>=4'} + + mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass-collect@2.0.1: + resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass-fetch@3.0.5: + resolution: 
{integrity: sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + minipass-flush@1.0.5: + resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} + engines: {node: '>= 8'} + + minipass-json-stream@1.0.2: + resolution: {integrity: sha512-myxeeTm57lYs8pH2nxPzmEEg8DGIgW+9mv6D4JZD2pa81I/OBjeU7PtICXV6c9eRGTA5JMDsuIPUZRCyBMYNhg==} + + minipass-pipeline@1.2.4: + resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} + engines: {node: '>=8'} + + minipass-sized@1.0.3: + resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==} + engines: {node: '>=8'} + + minipass@3.3.6: + resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} + engines: {node: '>=8'} + + minipass@5.0.0: + resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} + engines: {node: '>=8'} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + minizlib@2.1.2: + resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} + engines: {node: '>= 8'} + + mixpanel@0.13.0: + resolution: {integrity: sha512-YOWmpr/o4+zJ8LPjuLUkWLc2ImFeIkX6hF1t62Wlvq6loC6e8EK8qieYO4gYPTPxxtjAryl7xmIvf/7qnPwjrQ==} + engines: {node: '>=10.0'} + + mkdirp@0.5.6: + resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} + hasBin: true + + mkdirp@1.0.4: + resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} + engines: {node: '>=10'} + hasBin: true + + mkdirp@3.0.1: + resolution: {integrity: sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==} + engines: {node: '>=10'} + hasBin: true + + module-definition@5.0.1: + resolution: {integrity: sha512-kvw3B4G19IXk+BOXnYq/D/VeO9qfHaapMeuS7w7sNUqmGaA6hywdFHMi+VWeR9wUScXM7XjoryTffCZ5B0/8IA==} + engines: {node: '>=14'} + hasBin: true + + module-lookup-amd@8.0.5: + resolution: {integrity: sha512-vc3rYLjDo5Frjox8NZpiyLXsNWJ5BWshztc/5KSOMzpg9k5cHH652YsJ7VKKmtM4SvaxuE9RkrYGhiSjH3Ehow==} + engines: {node: '>=14'} + hasBin: true + + moment@2.30.1: + resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} + + ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + + nan@2.20.0: + resolution: {integrity: sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==} + + nanoid@3.3.7: + resolution: {integrity: 
sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + nanotimer@0.3.14: + resolution: {integrity: sha512-NpKXdP6ZLwZcODvDeyfoDBVoncbrgvC12txO3F4l9BxMycQjZD29AnasGAy7uSi3dcsTGnGn6/zzvQRwbjS4uw==} + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + natural-orderby@2.0.3: + resolution: {integrity: sha512-p7KTHxU0CUrcOXe62Zfrb5Z13nLvPhSWR/so3kFulUQU0sgUll2Z0LwpsLN351eOOD+hRGu/F1g+6xDfPeD++Q==} + + negotiator@0.6.3: + resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} + engines: {node: '>= 0.6'} + node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -1106,23 +3205,80 @@ packages: encoding: optional: true + node-gyp@10.2.0: + resolution: {integrity: sha512-sp3FonBAaFe4aYTcFdZUn2NYkbP7xroPGYvQmP4Nl5PxamznItBnNCgjrVTKrEfQynInMsJvZrdmqUnysCJ8rw==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} node-releases@2.0.14: resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + node-source-walk@6.0.2: + resolution: {integrity: sha512-jn9vOIK/nfqoFCcpK89/VCVaLg1IHE6UVfDOzvqmANaJ/rWCTEdH8RZ1V278nv2jr36BJdyQXIAavBLXpzdlag==} + engines: {node: '>=14'} + + nopt@7.2.1: + resolution: {integrity: sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + normalize-package-data@6.0.2: + resolution: {integrity: sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==} + engines: {node: ^16.14.0 || >=18.0.0} + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} + normalize-url@6.1.0: + resolution: {integrity: sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==} + engines: {node: '>=10'} + + npm-bundled@3.0.1: + resolution: {integrity: sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-install-checks@6.3.0: + resolution: {integrity: sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-normalize-package-bin@3.0.1: + resolution: {integrity: sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-package-arg@11.0.3: + resolution: {integrity: sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-packlist@8.0.2: + resolution: {integrity: sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-pick-manifest@9.1.0: + resolution: {integrity: 
sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-registry-fetch@16.2.1: + resolution: {integrity: sha512-8l+7jxhim55S85fjiDGJ1rZXBWGtRLi1OSb4Z3BPLObPuIaeKRlPRiYMSHU4/81ck3t71Z+UwDDl47gcpmfQQA==} + engines: {node: ^16.14.0 || >=18.0.0} + npm-run-path@4.0.1: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} + nth-check@2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + object-inspect@1.13.1: resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1130,10 +3286,30 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + open@8.4.2: + resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} + engines: {node: '>=12'} + openai@4.52.2: resolution: {integrity: sha512-mMc0XgFuVSkcm0lRIi8zaw++otC82ZlfkCur1qguXYWPETr/+ZwL9A/vvp3YahX+shpaT6j03dwsmUyLAfmEfg==} hasBin: true + opener@1.5.2: + resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} + hasBin: true + + opentracing@0.14.7: + resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} + engines: {node: '>=0.10'} + + ora@4.1.1: + resolution: {integrity: sha512-sjYP8QyVWBpBZWD6Vr1M/KwknSw6kJOz41tvGMlwWeClHBtYKTbHMki1PsLZnxKpXMPbTKv9b3pjQu3REib96A==} + engines: {node: '>=8'} + + p-cancelable@2.1.1: + resolution: {integrity: sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==} + engines: {node: '>=8'} + p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} @@ -1146,14 +3322,46 @@ packages: resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} engines: {node: '>=8'} + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + package-json-from-dist@1.0.0: + resolution: {integrity: sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==} + + pacote@17.0.7: + resolution: {integrity: sha512-sgvnoUMlkv9xHwDUKjKQFXVyUi8dtJGKp3vg6sYy+TxbDic5RjZCHF3ygv0EJgNRZ2GfRONjlKPUfokJ9lDpwQ==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + parse-json@5.2.0: resolution: {integrity: 
sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} + parse5-htmlparser2-tree-adapter@7.0.0: + resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} + + parse5-parser-stream@7.1.2: + resolution: {integrity: sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==} + + parse5@7.1.2: + resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + + password-prompt@1.1.3: + resolution: {integrity: sha512-HkrjG2aJlvF0t2BMH0e2LB/EHf3Lcq3fNMzy4GYHcQblAvOl+QQji1Lx7WRBMqpVK8p+KR7bCg7oqAMXtdgqyw==} + + patch-console@2.0.0: + resolution: {integrity: sha512-0YNdUceMdaQwoKce1gatDScmMo5pu/tfABfnzEqeG0gtTmd7mh/WcwgUjtAeOU7N8nFFlbQBnFK2gXW5fGvmMA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -1169,9 +3377,20 @@ packages: path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + picocolors@1.0.1: + resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -1189,41 +3408,227 @@ packages: engines: {node: '>=18'} hasBin: true + playwright-core@1.45.3: + resolution: {integrity: sha512-+ym0jNbcjikaOwwSZycFbwkWgfruWvYlJfThKYAlImbxUgdWFO2oW70ojPm4OpE4t6TAo2FY/smM+hpVTtkhDA==} + engines: {node: '>=18'} + hasBin: true + playwright@1.45.0: resolution: {integrity: sha512-4z3ac3plDfYzGB6r0Q3LF8POPR20Z8D0aXcxbJvmfMgSSq1hkcgvFRXJk9rUq5H/MJ0Ktal869hhOdI/zUTeLA==} engines: {node: '>=18'} hasBin: true + playwright@1.45.3: + resolution: {integrity: sha512-QhVaS+lpluxCaioejDZ95l4Y4jSFCsBvl2UZkpeXlzxmqS+aABr5c82YmfMHrL6x27nvrvykJAFpkzT2eWdJww==} + engines: {node: '>=18'} + hasBin: true + + polite-json@4.0.1: + resolution: {integrity: sha512-8LI5ZeCPBEb4uBbcYKNVwk4jgqNx1yHReWoW4H4uUihWlSqZsUDfSITrRhjliuPgxsNPFhNSudGO2Zu4cbWinQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + polite-json@5.0.0: + resolution: {integrity: sha512-OLS/0XeUAcE8a2fdwemNja+udKgXNnY6yKVIXqAD2zVRx1KvY6Ato/rZ2vdzbxqYwPW0u6SCNC/bAMPNzpzxbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + possible-typed-array-names@1.0.0: + resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} + engines: {node: '>= 0.4'} + + postcss-values-parser@6.0.2: + resolution: {integrity: sha512-YLJpK0N1brcNJrs9WatuJFtHaV9q5aAOj+S4DI5S7jgHlRfm0PIbDCAFRYMQD5SHq7Fy6xsDhyutgS0QOAs0qw==} + engines: {node: '>=10'} + peerDependencies: + postcss: ^8.2.9 + + postcss@8.4.41: + resolution: {integrity: 
sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==} + engines: {node: ^10 || ^12 || >=14} + + posthog-node@2.6.0: + resolution: {integrity: sha512-/BiFw/jwdP0uJSRAIoYqLoBTjZ612xv74b1L/a3T/p1nJVL8e0OrHuxbJW56c6WVW/IKm9gBF/zhbqfaz0XgJQ==} + engines: {node: '>=15.0.0'} + + precinct@11.0.5: + resolution: {integrity: sha512-oHSWLC8cL/0znFhvln26D14KfCQFFn4KOLSw6hmLhd+LQ2SKt9Ljm89but76Pc7flM9Ty1TnXyrA2u16MfRV3w==} + engines: {node: ^14.14.0 || >=16.0.0} + hasBin: true + + present@0.0.3: + resolution: {integrity: sha512-d0QMXYTKHuAO0n0IfI/x2lbNwybdNWjRQ08hQySzqMQ2M0gwh/IetTv2glkPJihFn+cMDYjK/BiVgcLcjsASgg==} + pretty-format@29.7.0: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + prismjs-terminal@1.2.3: + resolution: {integrity: sha512-xc0zuJ5FMqvW+DpiRkvxURlz98DdfDsZcFHdO699+oL+ykbFfgI7O4VDEgUyc07BSL2NHl3zdb8m/tZ/aaqUrw==} + engines: {node: '>=16'} + + prismjs@1.29.0: + resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} + engines: {node: '>=6'} + + proc-log@4.2.0: + resolution: {integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + process-on-spawn@1.0.0: + resolution: {integrity: sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==} + engines: {node: '>=8'} + + prom-client@14.2.0: + resolution: {integrity: sha512-sF308EhTenb/pDRPakm+WgiN+VdM/T1RaHj1x+MvAuT8UiQP8JmOEbxVqtkbfR4LrvOg5n7ic01kRBDGXjYikA==} + engines: {node: '>=10'} + + promise-inflight@1.0.1: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + + promise-retry@2.0.1: + resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} + engines: {node: '>=10'} + prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} + protobufjs@7.3.2: + resolution: {integrity: sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg==} + engines: {node: '>=12.0.0'} + + pump@3.0.0: + resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} + + punycode@1.3.2: + resolution: {integrity: sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==} + pure-rand@6.1.0: resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==} + q@1.5.1: + resolution: {integrity: sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==} + engines: {node: '>=0.6.0', teleport: '>=0.2.0'} + deprecated: |- + You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. 
Be excellent to each other. + + (For a CapTP with native promises, see @endo/eventual-send and @endo/captp) + qs@6.12.1: resolution: {integrity: sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==} engines: {node: '>=0.6'} + querystring@0.2.0: + resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==} + engines: {node: '>=0.4.x'} + deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + quick-lru@5.1.1: + resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} + engines: {node: '>=10'} + + quote-unquote@1.0.0: + resolution: {integrity: sha512-twwRO/ilhlG/FIgYeKGFqyHhoEhqgnKVkcmqMKi2r524gz3ZbDTcyFt38E9xjJI2vT+KbRNHVbnJ/e0I25Azwg==} + + rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + + react-dom@18.3.1: + resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} + peerDependencies: + react: ^18.3.1 + + react-element-to-jsx-string@15.0.0: + resolution: {integrity: sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ==} + peerDependencies: + react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 + react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 + + react-is@18.1.0: + resolution: {integrity: sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==} + react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + react-reconciler@0.29.2: + resolution: {integrity: sha512-zZQqIiYgDCTP/f1N/mAR10nJGrPD2ZR+jDSEsKWJHYC7Cm2wodlwbR3upZRdC3cjIjSlTLNVyO7Iu0Yy7t2AYg==} + engines: {node: '>=0.10.0'} + peerDependencies: + react: ^18.3.1 + + react@18.3.1: + resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} + engines: {node: '>=0.10.0'} + + read-package-json-fast@3.0.2: + resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + read-package-json@7.0.1: + resolution: {integrity: sha512-8PcDiZ8DXUjLf687Ol4BR8Bpm2umR7vhoZOzNRt+uxD9GpBh/K+CAAALVIiYFknmvlmyg7hM7BSNUXPaCCqd0Q==} + engines: {node: ^16.14.0 || >=18.0.0} + deprecated: This package is no longer supported. Please use @npmcli/package-json instead. 
+ + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdir-glob@1.1.3: + resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + redeyed@2.1.1: + resolution: {integrity: sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==} + require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + requirejs-config-file@4.0.0: + resolution: {integrity: sha512-jnIre8cbWOyvr8a5F2KuqBnY+SDA4NXr/hzEZJG79Mxm2WiFQz2dzhC8ibtPJS7zkmBEl1mxSwp5HhC1W4qpxw==} + engines: {node: '>=10.13.0'} + + requirejs@2.3.7: + resolution: {integrity: sha512-DouTG8T1WanGok6Qjg2SXuCMzszOo0eHeH9hDZ5Y4x8Je+9JB38HdTLT4/VA8OaUhBa0JPVHJ0pyBkM1z+pDsw==} + engines: {node: '>=0.4.0'} + hasBin: true + + resolve-alpn@1.2.1: + resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} + resolve-cwd@3.0.0: resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} engines: {node: '>=8'} + resolve-dependency-path@3.0.2: + resolution: {integrity: sha512-Tz7zfjhLfsvR39ADOSk9us4421J/1ztVBo4rWUkF38hgHK5m0OCZ3NxFVpqHRkjctnwVa15igEUHFJp8MCS7vA==} + engines: {node: '>=14'} + resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} + resolve-import@1.4.6: + resolution: {integrity: sha512-CIw9e64QcKcCFUj9+KxUCJPy8hYofv6eVfo3U9wdhCm2E4IjvFnZ6G4/yIC4yP3f11+h6uU5b3LdS7O64LgqrA==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + resolve.exports@2.0.2: resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} engines: {node: '>=10'} @@ -1232,6 +3637,65 @@ packages: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} hasBin: true + responselike@2.0.1: + resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==} + + restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + + restore-cursor@4.0.0: + resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@2.6.3: + resolution: {integrity: sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==} 
+ deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + rimraf@5.0.10: + resolution: {integrity: sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + sass-lookup@5.0.1: + resolution: {integrity: sha512-t0X5PaizPc2H4+rCwszAqHZRtr4bugo4pgiCvrBFvIX0XFxnr29g77LJcpyj9A0DcKf7gXMLcgvRjsonYI6x4g==} + engines: {node: '>=14'} + hasBin: true + + sax@1.2.1: + resolution: {integrity: sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==} + + scheduler@0.23.2: + resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} + + seedrandom@3.0.5: + resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==} + semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -1260,6 +3724,14 @@ packages: signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sigstore@2.3.1: + resolution: {integrity: sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==} + engines: {node: ^16.14.0 || >=18.0.0} + sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} @@ -1267,32 +3739,132 @@ packages: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} + slice-ansi@4.0.0: + resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} + engines: {node: '>=10'} + + slice-ansi@5.0.0: + resolution: {integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==} + engines: {node: '>=12'} + + slice-ansi@6.0.0: + resolution: {integrity: sha512-6bn4hRfkTvDfUoEQYkERg0BVF1D0vrX9HEkMl08uDiNWvVvjylLHvZFZWkDo6wjT8tUctbYl1nCOuE66ZTaUtA==} + engines: {node: '>=14.16'} + + smart-buffer@4.2.0: + resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} + engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + + socket.io-client@4.7.5: + resolution: {integrity: sha512-sJ/tqHOCe7Z50JCBCXrsY3I2k03iOiUe+tj1OmKeD2lXPiGH/RUCdTZFoqVyN7l1MnpIzPrGtLcijffmeouNlQ==} + engines: {node: '>=10.0.0'} + + 
socket.io-parser@4.2.4: + resolution: {integrity: sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==} + engines: {node: '>=10.0.0'} + + socketio-wildcard@2.0.0: + resolution: {integrity: sha512-Bf3ioZq15Z2yhFLDasRvbYitg82rwm+5AuER5kQvEQHhNFf4R4K5o/h57nEpN7A59T9FyRtTj34HZfMWAruw/A==} + + socks-proxy-agent@8.0.4: + resolution: {integrity: sha512-GNAq/eg8Udq2x0eNiFkr9gRg5bA7PXEWagQdeRX4cPSG+X/8V38v637gim9bjFptMk1QWsCTr0ttrJEiXbNnRw==} + engines: {node: '>= 14'} + + socks@2.8.3: + resolution: {integrity: sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + engines: {node: '>=0.10.0'} + + source-map-support@0.3.3: + resolution: {integrity: sha512-9O4+y9n64RewmFoKUZ/5Tx9IHIcXM6Q+RTSw6ehnqybUz4a7iwR3Eaw80uLtqqQ5D0C+5H03D4KKGo9PdP33Gg==} + source-map-support@0.5.13: resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + source-map@0.1.32: + resolution: {integrity: sha512-htQyLrrRLkQ87Zfrir4/yN+vAUd6DNjVayEjTSHXu29AYQJw57I4/xEL/M6p6E/woPNJwvZt6rVlzc7gFEJccQ==} + engines: {node: '>=0.8.0'} + source-map@0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} + spdx-correct@3.2.0: + resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} + + spdx-exceptions@2.5.0: + resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} + + spdx-expression-parse@3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} + + spdx-license-ids@3.0.18: + resolution: {integrity: sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==} + sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + sprintf-js@1.1.3: + resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} + + sqs-consumer@5.8.0: + resolution: {integrity: sha512-pJReMEtDM9/xzQTffb7dxMD5MKagBfOW65m+ITsbpNk0oZmJ38tTC4LPmj0/7ZcKSOqi2LrpA1b0qGYOwxlHJg==} + peerDependencies: + aws-sdk: ^2.1271.0 + + ssri@10.0.6: + resolution: {integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} + stoppable@1.1.0: + resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==} + engines: {node: '>=4', npm: '>=6'} + string-length@4.0.2: resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} engines: {node: '>=10'} + string-length@6.0.0: + resolution: {integrity: sha512-1U361pxZHEQ+FeSjzqRpV+cu2vTzYeWeafXFLykiFlv4Vc0n3njgU8HrMbyik5uwm77naWMuVG8fhEF+Ovb1Kg==} + engines: {node: '>=16'} + string-width@4.2.3: resolution: {integrity: 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + stringify-object@3.3.0: + resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==} + engines: {node: '>=4'} + strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + strip-bom@4.0.0: resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} engines: {node: '>=8'} @@ -1301,10 +3873,22 @@ packages: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} + strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + strnum@1.0.5: + resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} + + stylus-lookup@5.0.1: + resolution: {integrity: sha512-tLtJEd5AGvnVy4f9UHQMw4bkJJtaAcmo54N+ovQBjDY3DuWyK9Eltxzr5+KG0q4ew6v2EHyuWWNnHeiw/Eo7rQ==} + engines: {node: '>=14'} + hasBin: true + superagent@9.0.2: resolution: {integrity: sha512-xuW7dzkUpcJq7QnhOsnNUgtYp3xRwpt2F7abdRYIpCsAt0hhUqia0EdxyXZQQpNmGtsCzYHryaKSV3q3GJnq7w==} engines: {node: '>=14.18.0'} @@ -1325,14 +3909,75 @@ packages: resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} engines: {node: '>=10'} + supports-hyperlinks@2.3.0: + resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} + engines: {node: '>=8'} + supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + sync-content@1.0.2: + resolution: {integrity: sha512-znd3rYiiSxU3WteWyS9a6FXkTA/Wjk8WQsOyzHbineeL837dLn3DA4MRhsIX3qGcxDMH6+uuFV4axztssk7wEQ==} + engines: {node: '>=14'} + hasBin: true + + tap-parser@16.0.1: + resolution: {integrity: sha512-vKianJzSSzLkJ3bHBwzvZDDRi9yGMwkRANJxwPAjAue50owB8rlluYySmTN4tZVH0nsh6stvrQbg9kuCL5svdg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + + tap-yaml@2.2.2: + resolution: {integrity: sha512-MWG4OpAKtNoNVjCz/BqlDJiwTM99tiHRhHPS4iGOe1ZS0CgM4jSFH92lthSFvvy4EdDjQZDV7uYqUFlU9JuNhw==} + 
engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + tap@19.2.5: + resolution: {integrity: sha512-Mz7MznUuKCqrN9dr0s8REt6zLg6WLNrvGXwDSaUyPO73dpXXjakYA7YVKRWu6TBnj7NsSYKuHXpQFROlqZ2KTg==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + hasBin: true + + tapable@2.2.1: + resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} + engines: {node: '>=6'} + + tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + + tar@6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} + engines: {node: '>=10'} + + tcompare@7.0.1: + resolution: {integrity: sha512-JN5s7hgmg/Ya5HxZqCnywT+XiOGRFcJRgYhtMyt/1m+h0yWpWwApO7HIM8Bpwyno9hI151ljjp5eAPCHhIGbpQ==} + engines: {node: 16 >=16.17.0 || 18 >= 18.6.0 || >=20} + + tdigest@0.1.2: + resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + + temp@0.9.4: + resolution: {integrity: sha512-yYrrsWnrXMcdsnu/7YMYAofM1ktpL5By7vZhf15CrXijWWrEYZks5AXBudalfSWJLlnen/QUJUB5aoB0kqZUGA==} + engines: {node: '>=6.0.0'} + test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} + thrift@0.14.2: + resolution: {integrity: sha512-bW8EaE6iw3hSt4HB2HpBdHW86Xpb9IUJfqufx4NwEu7OGuIpS0ISj+Yy1Z1Wvhfno6SPNhKRJ1qFXea84HcrOQ==} + engines: {node: '>= 10.18.0'} + + tldts-core@6.1.39: + resolution: {integrity: sha512-+Qib8VaRq6F56UjP4CJXd30PI4s3hFumDywUlsbiEWoA8+lfAaWNTLr3e6/zZOgHzVyon4snHaybeFHd8C0j/A==} + + tldts@6.1.39: + resolution: {integrity: sha512-UCGXcPhYIUELc+FifEeDXYkoTWNU6iOEdM/Q5LsvkTz2SnpQ3q5onA+DiiZlR5YDskMhfK1YBQDeWL7PH9/miQ==} + hasBin: true + + tmp@0.2.1: + resolution: {integrity: sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==} + engines: {node: '>=8.17.0'} + tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} @@ -1344,9 +3989,20 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + tough-cookie@5.0.0-rc.4: + resolution: {integrity: sha512-EN59UG6X/O6Nz2p21O6UK8R97zvLETOZ9+FGNdo56VuJZ8cftVCZ6tyxvedkQBfcX22avA1HY+4n04OVT2q6cw==} + engines: {node: '>=16'} + tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + trivial-deferred@2.0.0: + resolution: {integrity: sha512-iGbM7X2slv9ORDVj2y2FFUq3cP/ypbtu2nQ8S38ufjL0glBABvmR9pTdsib1XtS2LUhhLMbelaBUaf/s5J3dSw==} + engines: {node: '>= 8'} + + try-require@1.2.1: + resolution: {integrity: sha512-aMzrGUIA/R2LwUgvsOusx+GTy8ERyNjpBzbWgS1Qx4oTFlXCMxY3PyyXbPE1pvrvK/CXpO+BBREEqrTkNroC+A==} + ts-jest@29.1.5: resolution: {integrity: sha512-UuClSYxM7byvvYfyWdFI+/2UxMmwNyJb0NPkZPQE2hew3RurV7l7zURgOHAd/1I1ZdPpe3GUsXNXAcN8TFKSIg==} engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} @@ -1371,10 +4027,53 @@ packages: esbuild: optional: true + ts-node@10.9.2: + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: 
'>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + + tsconfig-paths@4.2.0: + resolution: {integrity: sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==} + engines: {node: '>=6'} + + tshy@1.18.0: + resolution: {integrity: sha512-FQudIujBazHRu7CVPHKQE9/Xq1Wc7lezxD/FCnTXx2PTcnoSN32DVpb/ZXvzV2NJBTDB3XKjqX8Cdm+2UK1DlQ==} + engines: {node: 16 >=16.17 || 18 >=18.15.0 || >=20.6.1} + hasBin: true + + tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + + tslib@2.6.3: + resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} + + tsutils@3.21.0: + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + + tuf-js@2.2.1: + resolution: {integrity: sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==} + engines: {node: ^16.14.0 || >=18.0.0} + type-detect@4.0.8: resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} engines: {node: '>=4'} + type-fest@0.12.0: + resolution: {integrity: sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg==} + engines: {node: '>=10'} + type-fest@0.21.3: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} @@ -1387,19 +4086,79 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici@6.19.7: + resolution: {integrity: sha512-HR3W/bMGPSr90i8AAp2C4DM3wChFdJPLrWYpIS++LxS8K+W535qftjt+4MyjNYHeWabMj1nvtmLIi7l++iq91A==} + engines: {node: '>=18.17'} + + unique-filename@3.0.0: + resolution: {integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + unique-slug@4.0.0: + resolution: {integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + unix-dgram@2.0.6: + resolution: {integrity: sha512-AURroAsb73BZ6CdAyMrTk/hYKNj3DuYYEuOaB8bYMOHGKupRNScw90Q5C71tWJc3uE7dIeXRyuwN0xLLq3vDTg==} + engines: {node: '>=0.10.48'} + update-browserslist-db@1.0.14: resolution: {integrity: sha512-JixKH8GR2pWYshIPUg/NujK3JO7JiqEEUiNArE86NQyrgUuZeTlZQN3xuS/yiV5Kb48ev9K6RqNkaJjXsdg7Jw==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' + url@0.10.3: + resolution: {integrity: sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==} + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + util@0.12.5: + resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} + + uuid@8.0.0: + resolution: 
{integrity: sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==} + hasBin: true + + uuid@8.3.2: + resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} + hasBin: true + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + v8-to-istanbul@9.2.0: resolution: {integrity: sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==} engines: {node: '>=10.12.0'} + validate-npm-package-license@3.0.4: + resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + + validate-npm-package-name@5.0.1: + resolution: {integrity: sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + walk-sync@0.2.7: + resolution: {integrity: sha512-OH8GdRMowEFr0XSHQeX5fGweO6zSVHo7bG/0yJQx6LAj9Oukz0C8heI3/FYectT66gY0IPGe89kOvU410/UNpg==} + + walk-up-path@3.0.1: + resolution: {integrity: sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==} + walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + web-streams-polyfill@3.3.3: resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} engines: {node: '>= 8'} @@ -1411,18 +4170,50 @@ packages: webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + whatwg-url@5.0.0: resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + which-typed-array@1.1.15: + resolution: {integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==} + engines: {node: '>= 0.4'} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} hasBin: true + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + + widest-line@3.1.0: + resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} + engines: {node: '>=8'} + + widest-line@4.0.1: + resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} + engines: {node: '>=12'} + + wordwrap@1.0.0: + resolution: {integrity: 
sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} @@ -1430,6 +4221,29 @@ packages: resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + ws@5.2.4: + resolution: {integrity: sha512-fFCejsuC8f9kOSu9FYaOw8CdO68O3h5v0lg4p74o8JqWpwTf9tniOD+nOB78aWoVSS6WptVUmDrp/KPsMVBWFQ==} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + ws@8.17.1: resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} engines: {node: '>=10.0.0'} @@ -1442,6 +4256,18 @@ packages: utf-8-validate: optional: true + xml2js@0.6.2: + resolution: {integrity: sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==} + engines: {node: '>=4.0.0'} + + xmlbuilder@11.0.1: + resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} + engines: {node: '>=4.0'} + + xmlhttprequest-ssl@2.0.0: + resolution: {integrity: sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==} + engines: {node: '>=0.4.0'} + y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -1449,6 +4275,23 @@ packages: yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + + yaml-js@0.2.3: + resolution: {integrity: sha512-6xUQtVKl1qcd0EXtTEzUDVJy9Ji1fYa47LtkDtYKlIjhibPE9knNPmoRyf6SGREFHlOAUyDe9OdYqRP4DuSi5Q==} + + yaml-types@0.3.0: + resolution: {integrity: sha512-i9RxAO/LZBiE0NJUy9pbN5jFz5EasYDImzRkj8Y81kkInTi1laia3P3K/wlMKzOxFQutZip8TejvQP/DwgbU7A==} + engines: {node: '>= 16', npm: '>= 7'} + peerDependencies: + yaml: ^2.3.0 + + yaml@2.5.0: + resolution: {integrity: sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==} + engines: {node: '>= 14'} + hasBin: true + yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} @@ -1457,18 +4300,34 @@ packages: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} + yn@3.1.1: + resolution: {integrity: 
sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + yoga-wasm-web@0.3.3: + resolution: {integrity: sha512-N+d4UJSJbt/R3wqY7Coqs5pcV0aUj2j9IaQ3rNj9bVCLld8tTGKRa2USARjnvZJWVx1NDmQev8EknoczaOQDOA==} + + zip-stream@4.1.1: + resolution: {integrity: sha512-9qv4rlDiopXg4E69k+vMHjNN63YFMe9sZMrdlvKnCjlCRWeCBswPPMPUfx+ipsAWq1LXHe70RcbaHdJJpS6hyQ==} + engines: {node: '>= 10'} + snapshots: + '@alcalzone/ansi-tokenize@0.1.3': + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + '@ampproject/remapping@2.3.0': dependencies: '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 - '@anthropic-ai/sdk@0.24.3': + '@anthropic-ai/sdk@0.24.3(encoding@0.1.13)': dependencies: '@types/node': 18.19.39 '@types/node-fetch': 2.6.11 @@ -1476,11 +4335,675 @@ snapshots: agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 - node-fetch: 2.7.0 + node-fetch: 2.7.0(encoding@0.1.13) web-streams-polyfill: 3.3.3 transitivePeerDependencies: - encoding + '@artilleryio/int-commons@2.10.0': + dependencies: + async: 2.6.4 + cheerio: 1.0.0 + debug: 4.3.4(supports-color@8.1.1) + deep-for-each: 3.0.0 + espree: 9.6.1 + jsonpath-plus: 7.2.0 + lodash: 4.17.21 + ms: 2.1.3 + transitivePeerDependencies: + - supports-color + + '@artilleryio/int-core@2.14.0': + dependencies: + '@artilleryio/int-commons': 2.10.0 + '@artilleryio/sketches-js': 2.1.1 + agentkeepalive: 4.5.0 + arrivals: 2.1.2 + async: 2.6.4 + chalk: 2.4.2 + cheerio: 1.0.0 + cookie-parser: 1.4.6 + csv-parse: 4.16.3 + debug: 4.3.4(supports-color@8.1.1) + decompress-response: 6.0.0 + deep-for-each: 3.0.0 + driftless: 2.0.3 + esprima: 4.0.1 + eventemitter3: 4.0.7 + fast-deep-equal: 3.1.3 + filtrex: 0.5.4 + form-data: 3.0.1 + got: 11.8.6 + hpagent: 0.1.2 + https-proxy-agent: 5.0.1 + lodash: 4.17.21 + ms: 2.1.3 + protobufjs: 7.3.2 + socket.io-client: 4.7.5 + socketio-wildcard: 2.0.0 + tough-cookie: 5.0.0-rc.4 + try-require: 1.2.1 + uuid: 8.3.2 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + '@artilleryio/sketches-js@2.1.1': {} + + '@aws-crypto/sha256-browser@5.2.0': + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-locate-window': 3.568.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.3 + + '@aws-crypto/sha256-js@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.609.0 + tslib: 2.6.3 + + '@aws-crypto/supports-web-crypto@5.2.0': + dependencies: + tslib: 2.6.3 + + '@aws-crypto/util@5.2.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.3 + + '@aws-sdk/client-cloudwatch@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + 
'@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-compression': 3.0.7 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + '@smithy/util-waiter': 3.1.2 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-cognito-identity@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + 
'@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.629.0 + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sts@3.629.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/core': 3.629.0 + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/middleware-host-header': 3.620.0 + '@aws-sdk/middleware-logger': 3.609.0 + '@aws-sdk/middleware-recursion-detection': 3.620.0 + '@aws-sdk/middleware-user-agent': 3.620.0 + '@aws-sdk/region-config-resolver': 3.614.0 + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@aws-sdk/util-user-agent-browser': 3.609.0 + '@aws-sdk/util-user-agent-node': 3.614.0 + '@smithy/config-resolver': 3.0.5 + '@smithy/core': 2.3.2 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/hash-node': 3.0.3 + '@smithy/invalid-dependency': 3.0.3 + '@smithy/middleware-content-length': 3.0.5 + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + 
'@smithy/middleware-serde': 3.0.3 + '@smithy/middleware-stack': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.14 + '@smithy/util-defaults-mode-node': 3.0.14 + '@smithy/util-endpoints': 2.0.5 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.629.0': + dependencies: + '@smithy/core': 2.3.2 + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/signature-v4': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + fast-xml-parser: 4.4.1 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-cognito-identity@3.629.0': + dependencies: + '@aws-sdk/client-cognito-identity': 3.629.0 + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-env@3.620.1': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-http@3.622.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-stream': 3.1.3 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-ini@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-node@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-ini': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-process@3.620.1': + 
dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-provider-sso@3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-sso': 3.629.0 + '@aws-sdk/token-providers': 3.614.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.621.0(@aws-sdk/client-sts@3.629.0)': + dependencies: + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/credential-providers@3.630.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-cognito-identity': 3.629.0 + '@aws-sdk/client-sso': 3.629.0 + '@aws-sdk/client-sts': 3.629.0 + '@aws-sdk/credential-provider-cognito-identity': 3.629.0 + '@aws-sdk/credential-provider-env': 3.620.1 + '@aws-sdk/credential-provider-http': 3.622.0 + '@aws-sdk/credential-provider-ini': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-node': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/credential-provider-process': 3.620.1 + '@aws-sdk/credential-provider-sso': 3.629.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@aws-sdk/credential-provider-web-identity': 3.621.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/middleware-host-header@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-logger@3.609.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-recursion-detection@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/middleware-user-agent@3.620.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@aws-sdk/util-endpoints': 3.614.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/region-config-resolver@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@aws-sdk/token-providers@3.614.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))': + dependencies: + '@aws-sdk/client-sso-oidc': 3.629.0(@aws-sdk/client-sts@3.629.0) + '@aws-sdk/types': 3.609.0 + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/types@3.609.0': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@aws-sdk/util-endpoints@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + '@smithy/util-endpoints': 2.0.5 + tslib: 2.6.3 + + '@aws-sdk/util-locate-window@3.568.0': + dependencies: 
+ tslib: 2.6.3 + + '@aws-sdk/util-user-agent-browser@3.609.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/types': 3.3.0 + bowser: 2.11.0 + tslib: 2.6.3 + + '@aws-sdk/util-user-agent-node@3.614.0': + dependencies: + '@aws-sdk/types': 3.609.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@azure/abort-controller@1.1.0': + dependencies: + tslib: 2.6.3 + + '@azure/abort-controller@2.1.2': + dependencies: + tslib: 2.6.3 + + '@azure/arm-containerinstance@9.1.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-lro': 2.7.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-auth@1.7.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.9.2 + tslib: 2.6.3 + + '@azure/core-client@1.9.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.7.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-http-compat@2.1.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-client': 1.9.2 + '@azure/core-rest-pipeline': 1.16.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-lro@2.7.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + + '@azure/core-paging@1.6.2': + dependencies: + tslib: 2.6.3 + + '@azure/core-rest-pipeline@1.16.3': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.7.2 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.5 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/core-tracing@1.1.2': + dependencies: + tslib: 2.6.3 + + '@azure/core-util@1.9.2': + dependencies: + '@azure/abort-controller': 2.1.2 + tslib: 2.6.3 + + '@azure/core-xml@1.4.3': + dependencies: + fast-xml-parser: 4.4.1 + tslib: 2.6.3 + + '@azure/identity@4.4.1': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/logger': 1.1.4 + '@azure/msal-browser': 3.21.0 + '@azure/msal-node': 2.13.0 + events: 3.3.0 + jws: 4.0.0 + open: 8.4.2 + stoppable: 1.1.0 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/logger@1.1.4': + dependencies: + tslib: 2.6.3 + + '@azure/msal-browser@3.21.0': + dependencies: + '@azure/msal-common': 14.14.1 + + '@azure/msal-common@14.14.1': {} + + '@azure/msal-node@2.13.0': + dependencies: + '@azure/msal-common': 14.14.1 + jsonwebtoken: 9.0.2 + uuid: 8.3.2 + + '@azure/storage-blob@12.24.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + '@azure/core-http-compat': 2.1.2 + '@azure/core-lro': 2.7.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/core-xml': 1.4.3 + '@azure/logger': 1.1.4 + events: 3.3.0 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + + '@azure/storage-queue@12.23.0': + dependencies: + '@azure/abort-controller': 1.1.0 + '@azure/core-auth': 1.7.2 + '@azure/core-client': 1.9.2 + 
'@azure/core-http-compat': 2.1.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.16.3 + '@azure/core-tracing': 1.1.2 + '@azure/core-util': 1.9.2 + '@azure/core-xml': 1.4.3 + '@azure/logger': 1.1.4 + tslib: 2.6.3 + transitivePeerDependencies: + - supports-color + '@babel/code-frame@7.24.2': dependencies: '@babel/highlight': 7.24.5 @@ -1501,7 +5024,7 @@ snapshots: '@babel/traverse': 7.24.5 '@babel/types': 7.24.5 convert-source-map: 2.0.0 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -1668,7 +5191,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.5 '@babel/parser': 7.24.5 '@babel/types': 7.24.5 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -1679,10 +5202,67 @@ snapshots: '@babel/helper-validator-identifier': 7.24.5 to-fast-properties: 2.0.0 + '@base2/pretty-print-object@1.0.1': {} + '@bcoe/v8-coverage@0.2.3': {} + '@colors/colors@1.5.0': + optional: true + + '@cspotcode/source-map-support@0.8.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + + '@dependents/detective-less@4.1.0': + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + '@dqbd/tiktoken@1.0.15': {} + '@grpc/grpc-js@1.11.1': + dependencies: + '@grpc/proto-loader': 0.7.13 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.13': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.3 + protobufjs: 7.3.2 + yargs: 17.7.2 + + '@hapi/hoek@9.3.0': {} + + '@hapi/topo@5.1.0': + dependencies: + '@hapi/hoek': 9.3.0 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@isaacs/ts-node-temp-fork-for-pr-2009@10.9.7(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node14': 14.1.2 + '@tsconfig/node16': 16.1.3 + '@tsconfig/node18': 18.2.4 + '@tsconfig/node20': 20.1.4 + '@types/node': 20.14.9 + acorn: 8.12.1 + acorn-walk: 8.3.3 + arg: 4.1.3 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.4.5 + v8-compile-cache-lib: 3.0.1 + '@istanbuljs/load-nyc-config@1.1.0': dependencies: camelcase: 5.3.1 @@ -1702,7 +5282,7 @@ snapshots: jest-util: 29.7.0 slash: 3.0.0 - '@jest/core@29.7.0': + '@jest/core@29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5))': dependencies: '@jest/console': 29.7.0 '@jest/reporters': 29.7.0 @@ -1716,7 +5296,7 @@ snapshots: exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -1872,8 +5452,458 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping@0.3.9': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@ngneat/falso@7.2.0': + dependencies: + seedrandom: 3.0.5 + uuid: 8.3.2 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@npmcli/agent@2.2.2': + dependencies: + agent-base: 7.1.1 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.5 + lru-cache: 10.4.3 + socks-proxy-agent: 8.0.4 + 
transitivePeerDependencies: + - supports-color + + '@npmcli/fs@3.1.1': + dependencies: + semver: 7.6.2 + + '@npmcli/git@5.0.8': + dependencies: + '@npmcli/promise-spawn': 7.0.2 + ini: 4.1.3 + lru-cache: 10.4.3 + npm-pick-manifest: 9.1.0 + proc-log: 4.2.0 + promise-inflight: 1.0.1 + promise-retry: 2.0.1 + semver: 7.6.2 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + + '@npmcli/installed-package-contents@2.1.0': + dependencies: + npm-bundled: 3.0.1 + npm-normalize-package-bin: 3.0.1 + + '@npmcli/node-gyp@3.0.0': {} + + '@npmcli/package-json@5.2.0': + dependencies: + '@npmcli/git': 5.0.8 + glob: 10.4.5 + hosted-git-info: 7.0.2 + json-parse-even-better-errors: 3.0.2 + normalize-package-data: 6.0.2 + proc-log: 4.2.0 + semver: 7.6.2 + transitivePeerDependencies: + - bluebird + + '@npmcli/promise-spawn@7.0.2': + dependencies: + which: 4.0.0 + + '@npmcli/redact@1.1.0': {} + + '@npmcli/run-script@7.0.4': + dependencies: + '@npmcli/node-gyp': 3.0.0 + '@npmcli/package-json': 5.2.0 + '@npmcli/promise-spawn': 7.0.2 + node-gyp: 10.2.0 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + - supports-color + + '@oclif/core@2.16.0(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@types/cli-progress': 3.11.6 + ansi-escapes: 4.3.2 + ansi-styles: 4.3.0 + cardinal: 2.1.1 + chalk: 4.1.2 + clean-stack: 3.0.1 + cli-progress: 3.12.0 + debug: 4.3.4(supports-color@8.1.1) + ejs: 3.1.10 + get-package-type: 0.1.0 + globby: 11.1.0 + hyperlinker: 1.0.0 + indent-string: 4.0.0 + is-wsl: 2.2.0 + js-yaml: 3.14.1 + natural-orderby: 2.0.3 + object-treeify: 1.1.33 + password-prompt: 1.1.3 + slice-ansi: 4.0.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + supports-color: 8.1.1 + supports-hyperlinks: 2.3.0 + ts-node: 10.9.2(@types/node@20.14.9)(typescript@5.4.5) + tslib: 2.6.3 + widest-line: 3.1.0 + wordwrap: 1.0.0 + wrap-ansi: 7.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@oclif/plugin-help@5.2.20(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@oclif/plugin-not-found@2.4.3(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + chalk: 4.1.2 + fast-levenshtein: 3.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@opentelemetry/api-logs@0.41.2': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/api-logs@0.43.0': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/api@1.9.0': {} + + '@opentelemetry/context-async-hooks@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/core@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/core@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/exporter-metrics-otlp-grpc@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 
0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-metrics-otlp-http@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-metrics-otlp-proto@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-grpc@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-http@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-proto@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-zipkin@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/otlp-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-exporter-base@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 
1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-grpc-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-grpc-exporter-base@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.1 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.43.0(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-proto-exporter-base@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.41.2(@opentelemetry/api@1.9.0) + protobufjs: 7.3.2 + + '@opentelemetry/otlp-transformer@0.41.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.41.2 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.41.2(@opentelemetry/api-logs@0.41.2)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-transformer@0.43.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/resources@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/resources@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/resources@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/sdk-logs@0.41.2(@opentelemetry/api-logs@0.41.2)(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.41.2 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-logs@0.43.0(@opentelemetry/api-logs@0.43.0)(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.43.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-metrics@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-metrics@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + 
'@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-metrics@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + '@opentelemetry/sdk-trace-base@1.15.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.15.2(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.15.2 + + '@opentelemetry/sdk-trace-base@1.17.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.17.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.17.0 + + '@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + + '@opentelemetry/semantic-conventions@1.15.2': {} + + '@opentelemetry/semantic-conventions@1.17.0': {} + + '@opentelemetry/semantic-conventions@1.25.1': {} + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@playwright/browser-chromium@1.45.3': + dependencies: + playwright-core: 1.45.3 + + '@playwright/test@1.45.3': + dependencies: + playwright: 1.45.3 + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@sideway/address@4.1.5': + dependencies: + '@hapi/hoek': 9.3.0 + + '@sideway/formula@3.0.1': {} + + '@sideway/pinpoint@2.0.0': {} + + '@sigstore/bundle@2.3.2': + dependencies: + '@sigstore/protobuf-specs': 0.3.2 + + '@sigstore/core@1.1.0': {} + + '@sigstore/protobuf-specs@0.3.2': {} + + '@sigstore/sign@2.3.2': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + make-fetch-happen: 13.0.1 + proc-log: 4.2.0 + promise-retry: 2.0.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/tuf@2.3.4': + dependencies: + '@sigstore/protobuf-specs': 0.3.2 + tuf-js: 2.2.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/verify@1.2.1': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + '@sinclair/typebox@0.27.8': {} + '@sindresorhus/is@4.6.0': {} + '@sinonjs/commons@3.0.1': dependencies: type-detect: 4.0.8 @@ -1882,6 +5912,291 @@ snapshots: dependencies: '@sinonjs/commons': 3.0.1 + '@smithy/abort-controller@3.1.1': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/config-resolver@3.0.5': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/core@2.3.2': + dependencies: + '@smithy/middleware-endpoint': 3.1.0 + '@smithy/middleware-retry': 3.0.14 + '@smithy/middleware-serde': 3.0.3 + '@smithy/protocol-http': 4.1.0 + 
'@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/credential-provider-imds@3.2.0': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + tslib: 2.6.3 + + '@smithy/fetch-http-handler@3.2.4': + dependencies: + '@smithy/protocol-http': 4.1.0 + '@smithy/querystring-builder': 3.0.3 + '@smithy/types': 3.3.0 + '@smithy/util-base64': 3.0.0 + tslib: 2.6.3 + + '@smithy/hash-node@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/invalid-dependency@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/is-array-buffer@2.2.0': + dependencies: + tslib: 2.6.3 + + '@smithy/is-array-buffer@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/middleware-compression@3.0.7': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-utf8': 3.0.0 + fflate: 0.8.1 + tslib: 2.6.3 + + '@smithy/middleware-content-length@3.0.5': + dependencies: + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/middleware-endpoint@3.1.0': + dependencies: + '@smithy/middleware-serde': 3.0.3 + '@smithy/node-config-provider': 3.1.4 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/url-parser': 3.0.3 + '@smithy/util-middleware': 3.0.3 + tslib: 2.6.3 + + '@smithy/middleware-retry@3.0.14': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/protocol-http': 4.1.0 + '@smithy/service-error-classification': 3.0.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-retry': 3.0.3 + tslib: 2.6.3 + uuid: 9.0.1 + + '@smithy/middleware-serde@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/middleware-stack@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/node-config-provider@3.1.4': + dependencies: + '@smithy/property-provider': 3.1.3 + '@smithy/shared-ini-file-loader': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/node-http-handler@3.1.4': + dependencies: + '@smithy/abort-controller': 3.1.1 + '@smithy/protocol-http': 4.1.0 + '@smithy/querystring-builder': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/property-provider@3.1.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/protocol-http@4.1.0': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/querystring-builder@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.6.3 + + '@smithy/querystring-parser@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/service-error-classification@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + + '@smithy/shared-ini-file-loader@3.1.4': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/signature-v4@4.1.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.3 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/smithy-client@3.1.12': + dependencies: + '@smithy/middleware-endpoint': 3.1.0 + 
'@smithy/middleware-stack': 3.0.3 + '@smithy/protocol-http': 4.1.0 + '@smithy/types': 3.3.0 + '@smithy/util-stream': 3.1.3 + tslib: 2.6.3 + + '@smithy/types@3.3.0': + dependencies: + tslib: 2.6.3 + + '@smithy/url-parser@3.0.3': + dependencies: + '@smithy/querystring-parser': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-base64@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-body-length-browser@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-body-length-node@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-buffer-from@2.2.0': + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.6.3 + + '@smithy/util-buffer-from@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-config-provider@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-defaults-mode-browser@3.0.14': + dependencies: + '@smithy/property-provider': 3.1.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + bowser: 2.11.0 + tslib: 2.6.3 + + '@smithy/util-defaults-mode-node@3.0.14': + dependencies: + '@smithy/config-resolver': 3.0.5 + '@smithy/credential-provider-imds': 3.2.0 + '@smithy/node-config-provider': 3.1.4 + '@smithy/property-provider': 3.1.3 + '@smithy/smithy-client': 3.1.12 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-endpoints@2.0.5': + dependencies: + '@smithy/node-config-provider': 3.1.4 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-hex-encoding@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-middleware@3.0.3': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-retry@3.0.3': + dependencies: + '@smithy/service-error-classification': 3.0.3 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@smithy/util-stream@3.1.3': + dependencies: + '@smithy/fetch-http-handler': 3.2.4 + '@smithy/node-http-handler': 3.1.4 + '@smithy/types': 3.3.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-uri-escape@3.0.0': + dependencies: + tslib: 2.6.3 + + '@smithy/util-utf8@2.3.0': + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.6.3 + + '@smithy/util-utf8@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.6.3 + + '@smithy/util-waiter@3.1.2': + dependencies: + '@smithy/abort-controller': 3.1.1 + '@smithy/types': 3.3.0 + tslib: 2.6.3 + + '@socket.io/component-emitter@3.1.2': {} + '@supabase/auth-js@2.64.2': dependencies: '@supabase/node-fetch': 2.6.15 @@ -1924,6 +6239,279 @@ snapshots: - bufferutil - utf-8-validate + '@szmarczak/http-timer@4.0.6': + dependencies: + defer-to-connect: 2.0.1 + + '@tapjs/after-each@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + function-loop: 4.0.0 + + '@tapjs/after@1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + + '@tapjs/asserts@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + is-actual-promise: 1.0.2 
+ tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - react + - react-dom + + '@tapjs/before-each@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + function-loop: 4.0.0 + + '@tapjs/before@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + + '@tapjs/chdir@1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/config@3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + chalk: 5.3.0 + jackspeak: 3.4.3 + polite-json: 4.0.1 + tap-yaml: 2.2.2 + walk-up-path: 3.0.1 + + '@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/processinfo': 3.1.8 + '@tapjs/stack': 2.0.1 + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + async-hook-domain: 4.0.1 + diff: 5.2.0 + is-actual-promise: 1.0.2 + minipass: 7.1.2 + signal-exit: 4.1.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - react + - react-dom + + '@tapjs/error-serdes@2.0.1': + dependencies: + minipass: 7.1.2 + + '@tapjs/filter@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/fixture@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + mkdirp: 3.0.1 + rimraf: 5.0.10 + + '@tapjs/intercept@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + + '@tapjs/mock@2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + resolve-import: 1.4.6 + walk-up-path: 3.0.1 + + 
'@tapjs/node-serialize@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/error-serdes': 2.0.1 + '@tapjs/stack': 2.0.1 + tap-parser: 16.0.1 + + '@tapjs/processinfo@3.1.8': + dependencies: + pirates: 4.0.6 + process-on-spawn: 1.0.0 + signal-exit: 4.1.0 + uuid: 8.3.2 + + '@tapjs/reporter@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))': + dependencies: + '@tapjs/config': 3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/stack': 2.0.1 + chalk: 5.3.0 + ink: 4.4.1(react@18.3.1) + minipass: 7.1.2 + ms: 2.1.3 + patch-console: 2.0.0 + prismjs-terminal: 1.2.3 + react: 18.3.1 + string-length: 6.0.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - '@tapjs/test' + - '@types/react' + - bufferutil + - react-devtools-core + - react-dom + - utf-8-validate + + '@tapjs/run@2.1.7(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/config': 3.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/processinfo': 3.1.8 + '@tapjs/reporter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1)) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + c8: 9.1.0 + chalk: 5.3.0 + chokidar: 3.6.0 + foreground-child: 3.3.0 + glob: 10.4.5 + minipass: 7.1.2 + mkdirp: 3.0.1 + opener: 1.5.2 + pacote: 17.0.7 + resolve-import: 1.4.6 + rimraf: 5.0.10 + semver: 7.6.2 + signal-exit: 4.1.0 + tap-parser: 16.0.1 + tap-yaml: 2.2.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + which: 4.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - 
'@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - utf-8-validate + + '@tapjs/snapshot@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + is-actual-promise: 1.0.2 + tcompare: 7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + trivial-deferred: 2.0.0 + transitivePeerDependencies: + - react + - react-dom + + '@tapjs/spawn@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/stack@2.0.1': {} + + '@tapjs/stdin@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tapjs/test@2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@isaacs/ts-node-temp-fork-for-pr-2009': 10.9.7(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/after-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/asserts': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/chdir': 1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/filter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/fixture': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/intercept': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/mock': 2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/node-serialize': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/snapshot': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/typescript': 1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/worker': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + glob: 10.4.5 + jackspeak: 3.4.3 + mkdirp: 3.0.1 + package-json-from-dist: 1.0.0 + resolve-import: 1.4.6 + rimraf: 5.0.10 + sync-content: 1.0.2 + tap-parser: 16.0.1 + tshy: 1.18.0 + typescript: 5.4.5 + walk-up-path: 
3.0.1 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - react + - react-dom + + '@tapjs/typescript@1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5)': + dependencies: + '@isaacs/ts-node-temp-fork-for-pr-2009': 10.9.7(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript + + '@tapjs/worker@2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + dependencies: + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + + '@tsconfig/node10@1.0.11': {} + + '@tsconfig/node12@1.0.11': {} + + '@tsconfig/node14@1.0.3': {} + + '@tsconfig/node14@14.1.2': {} + + '@tsconfig/node16@1.0.4': {} + + '@tsconfig/node16@16.1.3': {} + + '@tsconfig/node18@18.2.4': {} + + '@tsconfig/node20@20.1.4': {} + + '@tufjs/canonical-json@2.0.0': {} + + '@tufjs/models@2.0.1': + dependencies: + '@tufjs/canonical-json': 2.0.0 + minimatch: 9.0.5 + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.24.5 @@ -1945,12 +6533,25 @@ snapshots: dependencies: '@babel/types': 7.24.5 + '@types/cacheable-request@6.0.3': + dependencies: + '@types/http-cache-semantics': 4.0.4 + '@types/keyv': 3.1.4 + '@types/node': 20.14.9 + '@types/responselike': 1.0.3 + + '@types/cli-progress@3.11.6': + dependencies: + '@types/node': 20.14.9 + '@types/cookiejar@2.1.5': {} '@types/graceful-fs@4.1.9': dependencies: '@types/node': 20.14.9 + '@types/http-cache-semantics@4.0.4': {} + '@types/istanbul-lib-coverage@2.0.6': {} '@types/istanbul-lib-report@3.0.3': @@ -1966,6 +6567,10 @@ snapshots: expect: 29.7.0 pretty-format: 29.7.0 + '@types/keyv@3.1.4': + dependencies: + '@types/node': 20.14.9 + '@types/methods@1.1.4': {} '@types/node-fetch@2.6.11': @@ -1987,6 +6592,10 @@ snapshots: '@types/phoenix@1.6.5': {} + '@types/responselike@1.0.3': + dependencies: + '@types/node': 20.14.9 + '@types/stack-utils@2.0.3': {} '@types/superagent@8.1.6': @@ -2010,20 +6619,76 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 + '@typescript-eslint/types@5.62.0': {} + + '@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5)': + dependencies: + '@typescript-eslint/types': 5.62.0 + '@typescript-eslint/visitor-keys': 5.62.0 + debug: 4.3.4(supports-color@8.1.1) + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.6.2 + tsutils: 3.21.0(typescript@5.4.5) + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@5.62.0': + dependencies: + '@typescript-eslint/types': 5.62.0 + eslint-visitor-keys: 3.4.3 + + abbrev@2.0.0: {} + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 + acorn-jsx@5.3.2(acorn@8.12.1): + dependencies: + acorn: 8.12.1 + + acorn-walk@8.3.3: + dependencies: + acorn: 8.12.1 + + acorn@8.12.1: {} + + agent-base@6.0.2: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + agent-base@7.1.1: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + agentkeepalive@4.5.0: dependencies: humanize-ms: 1.2.1 + aggregate-error@3.1.0: + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + + amdefine@1.0.1: {} + ansi-escapes@4.3.2: dependencies: type-fest: 0.21.3 + ansi-escapes@6.2.1: {} + ansi-regex@5.0.1: {} + 
ansi-regex@6.0.1: {} + ansi-styles@3.2.1: dependencies: color-convert: 1.9.3 @@ -2034,19 +6699,276 @@ snapshots: ansi-styles@5.2.0: {} + ansi-styles@6.2.1: {} + + ansicolors@0.3.2: {} + anymatch@3.1.3: dependencies: normalize-path: 3.0.0 picomatch: 2.3.1 + app-module-path@2.2.0: {} + + archiver-utils@2.1.0: + dependencies: + glob: 7.2.3 + graceful-fs: 4.2.11 + lazystream: 1.0.1 + lodash.defaults: 4.2.0 + lodash.difference: 4.5.0 + lodash.flatten: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.union: 4.6.0 + normalize-path: 3.0.0 + readable-stream: 2.3.8 + + archiver-utils@3.0.4: + dependencies: + glob: 7.2.3 + graceful-fs: 4.2.11 + lazystream: 1.0.1 + lodash.defaults: 4.2.0 + lodash.difference: 4.5.0 + lodash.flatten: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.union: 4.6.0 + normalize-path: 3.0.0 + readable-stream: 3.6.2 + + archiver@5.3.2: + dependencies: + archiver-utils: 2.1.0 + async: 3.2.5 + buffer-crc32: 0.2.13 + readable-stream: 3.6.2 + readdir-glob: 1.1.3 + tar-stream: 2.2.0 + zip-stream: 4.1.1 + + arg@4.1.3: {} + argparse@1.0.10: dependencies: sprintf-js: 1.0.3 + array-union@2.1.0: {} + + arrivals@2.1.2: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + nanotimer: 0.3.14 + transitivePeerDependencies: + - supports-color + + artillery-engine-playwright@1.16.0: + dependencies: + '@playwright/browser-chromium': 1.45.3 + '@playwright/test': 1.45.3 + debug: 4.3.4(supports-color@8.1.1) + playwright: 1.45.3 + transitivePeerDependencies: + - supports-color + + artillery-plugin-apdex@1.10.0(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + tap: 19.2.5(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5) + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + + artillery-plugin-ensure@1.13.0: + dependencies: + chalk: 2.4.2 + debug: 4.3.4(supports-color@8.1.1) + filtrex: 2.2.3 + transitivePeerDependencies: + - supports-color + + artillery-plugin-expect@2.13.0: + dependencies: + chalk: 4.1.2 + debug: 4.3.4(supports-color@8.1.1) + jmespath: 0.16.0 + lodash: 4.17.21 + transitivePeerDependencies: + - supports-color + + artillery-plugin-fake-data@1.10.0: + dependencies: + '@ngneat/falso': 7.2.0 + + artillery-plugin-metrics-by-endpoint@1.13.0: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + artillery-plugin-publish-metrics@2.24.0: + dependencies: + '@aws-sdk/client-cloudwatch': 3.629.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-grpc': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-proto': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc': 0.43.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.41.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + async: 2.6.4 + 
datadog-metrics: 0.9.3 + debug: 4.3.4(supports-color@8.1.1) + dogapi: 2.8.4 + hot-shots: 6.8.7 + lightstep-tracer: 0.31.2 + mixpanel: 0.13.0 + opentracing: 0.14.7 + prom-client: 14.2.0 + semver: 7.6.2 + uuid: 8.3.2 + transitivePeerDependencies: + - aws-crt + - bufferutil + - supports-color + - utf-8-validate + + artillery-plugin-slack@1.8.0: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + got: 11.8.6 + transitivePeerDependencies: + - supports-color + + artillery@2.0.19(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + '@artilleryio/int-commons': 2.10.0 + '@artilleryio/int-core': 2.14.0 + '@aws-sdk/credential-providers': 3.630.0(@aws-sdk/client-sso-oidc@3.629.0(@aws-sdk/client-sts@3.629.0)) + '@azure/arm-containerinstance': 9.1.0 + '@azure/identity': 4.4.1 + '@azure/storage-blob': 12.24.0 + '@azure/storage-queue': 12.23.0 + '@oclif/core': 2.16.0(@types/node@20.14.9)(typescript@5.4.5) + '@oclif/plugin-help': 5.2.20(@types/node@20.14.9)(typescript@5.4.5) + '@oclif/plugin-not-found': 2.4.3(@types/node@20.14.9)(typescript@5.4.5) + archiver: 5.3.2 + artillery-engine-playwright: 1.16.0 + artillery-plugin-apdex: 1.10.0(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5) + artillery-plugin-ensure: 1.13.0 + artillery-plugin-expect: 2.13.0 + artillery-plugin-fake-data: 1.10.0 + artillery-plugin-metrics-by-endpoint: 1.13.0 + artillery-plugin-publish-metrics: 2.24.0 + artillery-plugin-slack: 1.8.0 + async: 2.6.4 + aws-sdk: 2.1674.0 + chalk: 2.4.2 + chokidar: 3.6.0 + ci-info: 3.9.0 + cli-table3: 0.6.5 + cross-spawn: 7.0.3 + csv-parse: 4.16.3 + debug: 4.3.4(supports-color@8.1.1) + dependency-tree: 10.0.9 + detective-es6: 4.0.1 + dotenv: 16.4.5 + driftless: 2.0.3 + esbuild-wasm: 0.19.12 + eventemitter3: 4.0.7 + fs-extra: 10.1.0 + got: 11.8.6 + joi: 17.13.3 + js-yaml: 3.14.1 + jsonwebtoken: 9.0.2 + lodash: 4.17.21 + moment: 2.30.1 + nanoid: 3.3.7 + ora: 4.1.1 + posthog-node: 2.6.0(debug@4.3.4) + rc: 1.2.8 + sqs-consumer: 5.8.0(aws-sdk@2.1674.0) + temp: 0.9.4 + tmp: 0.2.1 + try-require: 1.2.1 + walk-sync: 0.2.7 + yaml-js: 0.2.3 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - aws-crt + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + asap@2.0.6: {} + ast-module-types@5.0.0: {} + + astral-regex@2.0.0: {} + + async-hook-domain@4.0.1: {} + + async-limiter@1.0.1: {} + + async@1.5.0: {} + + async@2.6.4: + dependencies: + lodash: 4.17.21 + + async@3.2.5: {} + asynckit@0.4.0: {} + auto-bind@5.0.1: {} + + available-typed-arrays@1.0.7: + dependencies: + possible-typed-array-names: 1.0.0 + + aws-sdk@2.1674.0: + dependencies: + buffer: 4.9.2 + events: 1.1.1 + ieee754: 1.1.13 + jmespath: 0.16.0 + querystring: 0.2.0 + sax: 1.2.1 + url: 0.10.3 + util: 0.12.5 + uuid: 8.0.0 + xml2js: 0.6.2 + + axios@0.27.2(debug@4.3.4): + dependencies: + follow-redirects: 1.15.6(debug@4.3.4) + form-data: 4.0.0 + transitivePeerDependencies: + - debug + babel-jest@29.7.0(@babel/core@7.24.5): dependencies: '@babel/core': 7.24.5 @@ -2101,15 +7023,44 @@ snapshots: balanced-match@1.0.2: {} + base64-js@1.5.1: {} + + bignumber.js@9.1.2: {} + + binary-extensions@2.3.0: {} + + bindings@1.5.0: + dependencies: + file-uri-to-path: 1.0.0 + optional: true + + bintrees@1.0.2: {} + + bl@4.1.0: + dependencies: + buffer: 5.7.1 + inherits: 
2.0.4 + readable-stream: 3.6.2 + + boolbase@1.0.0: {} + + bowser@2.11.0: {} + brace-expansion@1.1.11: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + braces@3.0.2: dependencies: fill-range: 7.0.1 + browser-or-node@1.3.0: {} + browserslist@4.23.0: dependencies: caniuse-lite: 1.0.30001615 @@ -2125,8 +7076,64 @@ snapshots: dependencies: node-int64: 0.4.0 + buffer-crc32@0.2.13: {} + + buffer-equal-constant-time@1.0.1: {} + buffer-from@1.1.2: {} + buffer@4.9.2: + dependencies: + base64-js: 1.5.1 + ieee754: 1.1.13 + isarray: 1.0.0 + + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + c8@9.1.0: + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@istanbuljs/schema': 0.1.3 + find-up: 5.0.0 + foreground-child: 3.3.0 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-reports: 3.1.7 + test-exclude: 6.0.0 + v8-to-istanbul: 9.2.0 + yargs: 17.7.2 + yargs-parser: 21.1.1 + + cacache@18.0.4: + dependencies: + '@npmcli/fs': 3.1.1 + fs-minipass: 3.0.3 + glob: 10.4.5 + lru-cache: 10.4.3 + minipass: 7.1.2 + minipass-collect: 2.0.1 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + p-map: 4.0.0 + ssri: 10.0.6 + tar: 6.2.1 + unique-filename: 3.0.0 + + cacheable-lookup@5.0.4: {} + + cacheable-request@7.0.4: + dependencies: + clone-response: 1.0.3 + get-stream: 5.2.0 + http-cache-semantics: 4.1.1 + keyv: 4.5.4 + lowercase-keys: 2.0.0 + normalize-url: 6.1.0 + responselike: 2.0.1 + call-bind@1.0.7: dependencies: es-define-property: 1.0.0 @@ -2143,31 +7150,123 @@ snapshots: caniuse-lite@1.0.30001615: {} + cardinal@2.1.1: + dependencies: + ansicolors: 0.3.2 + redeyed: 2.1.1 + chalk@2.4.2: dependencies: ansi-styles: 3.2.1 escape-string-regexp: 1.0.5 supports-color: 5.5.0 + chalk@3.0.0: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + chalk@4.1.2: dependencies: ansi-styles: 4.3.0 supports-color: 7.2.0 + chalk@5.3.0: {} + char-regex@1.0.2: {} + cheerio-select@2.1.0: + dependencies: + boolbase: 1.0.0 + css-select: 5.1.0 + css-what: 6.1.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + + cheerio@1.0.0: + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.1.0 + encoding-sniffer: 0.2.0 + htmlparser2: 9.1.0 + parse5: 7.1.2 + parse5-htmlparser2-tree-adapter: 7.0.0 + parse5-parser-stream: 7.1.2 + undici: 6.19.7 + whatwg-mimetype: 4.0.0 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chownr@2.0.0: {} + ci-info@3.9.0: {} cjs-module-lexer@1.3.1: {} + clean-stack@2.2.0: {} + + clean-stack@3.0.1: + dependencies: + escape-string-regexp: 4.0.0 + + cli-boxes@3.0.0: {} + + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + + cli-cursor@4.0.0: + dependencies: + restore-cursor: 4.0.0 + + cli-progress@3.12.0: + dependencies: + string-width: 4.2.3 + + cli-spinners@2.9.2: {} + + cli-table3@0.6.5: + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + + cli-truncate@3.1.0: + dependencies: + slice-ansi: 5.0.0 + string-width: 5.1.2 + cliui@8.0.1: dependencies: string-width: 4.2.3 strip-ansi: 6.0.1 wrap-ansi: 7.0.0 + clone-response@1.0.3: + dependencies: + mimic-response: 1.0.1 + + clone@1.0.4: {} + co@4.6.0: {} + code-excerpt@4.0.0: + dependencies: + convert-to-spaces: 2.0.1 + collect-v8-coverage@1.0.2: {} color-convert@1.9.3: @@ -2186,21 +7285,50 @@ 
snapshots: dependencies: delayed-stream: 1.0.0 + commander@10.0.1: {} + component-emitter@1.3.1: {} + compress-commons@4.1.2: + dependencies: + buffer-crc32: 0.2.13 + crc32-stream: 4.0.3 + normalize-path: 3.0.0 + readable-stream: 3.6.2 + concat-map@0.0.1: {} convert-source-map@2.0.0: {} + convert-to-spaces@2.0.1: {} + + cookie-parser@1.4.6: + dependencies: + cookie: 0.4.1 + cookie-signature: 1.0.6 + + cookie-signature@1.0.6: {} + + cookie@0.4.1: {} + cookiejar@2.1.4: {} - create-jest@29.7.0(@types/node@20.14.9): + core-util-is@1.0.3: {} + + crc-32@1.2.2: {} + + crc32-stream@4.0.3: + dependencies: + crc-32: 1.2.2 + readable-stream: 3.6.2 + + create-jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -2209,30 +7337,127 @@ snapshots: - supports-color - ts-node + create-require@1.1.1: {} + cross-spawn@7.0.3: dependencies: path-key: 3.1.1 shebang-command: 2.0.0 which: 2.0.2 - debug@4.3.4: + css-select@5.1.0: + dependencies: + boolbase: 1.0.0 + css-what: 6.1.0 + domhandler: 5.0.3 + domutils: 3.1.0 + nth-check: 2.1.1 + + css-what@6.1.0: {} + + csv-parse@4.16.3: {} + + datadog-metrics@0.9.3: + dependencies: + debug: 3.1.0 + dogapi: 2.8.4 + transitivePeerDependencies: + - supports-color + + debug@3.1.0: + dependencies: + ms: 2.0.0 + + debug@4.3.4(supports-color@8.1.1): dependencies: ms: 2.1.2 + optionalDependencies: + supports-color: 8.1.1 + + decompress-response@6.0.0: + dependencies: + mimic-response: 3.1.0 dedent@1.5.3: {} + deep-extend@0.6.0: {} + + deep-for-each@3.0.0: + dependencies: + lodash.isplainobject: 4.0.6 + deepmerge@4.3.1: {} + defaults@1.0.4: + dependencies: + clone: 1.0.4 + + defer-to-connect@2.0.1: {} + define-data-property@1.1.4: dependencies: es-define-property: 1.0.0 es-errors: 1.3.0 gopd: 1.0.1 + define-lazy-prop@2.0.0: {} + delayed-stream@1.0.0: {} + dependency-tree@10.0.9: + dependencies: + commander: 10.0.1 + filing-cabinet: 4.2.0 + precinct: 11.0.5 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + detect-newline@3.1.0: {} + detective-amd@5.0.2: + dependencies: + ast-module-types: 5.0.0 + escodegen: 2.1.0 + get-amd-module-type: 5.0.1 + node-source-walk: 6.0.2 + + detective-cjs@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + + detective-es6@4.0.1: + dependencies: + node-source-walk: 6.0.2 + + detective-postcss@6.1.3: + dependencies: + is-url: 1.2.4 + postcss: 8.4.41 + postcss-values-parser: 6.0.2(postcss@8.4.41) + + detective-sass@5.0.3: + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + + detective-scss@4.0.3: + dependencies: + gonzales-pe: 4.3.0 + node-source-walk: 6.0.2 + + detective-stylus@4.0.0: {} + + detective-typescript@11.2.0: + dependencies: + '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5) + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + dezalgo@1.0.4: dependencies: asap: 2.0.6 @@ -2240,14 +7465,105 @@ snapshots: diff-sequences@29.6.3: {} + diff@4.0.2: {} + + diff@5.2.0: {} + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + dogapi@2.8.4: + dependencies: + extend: 3.0.2 + json-bigint: 1.0.0 + lodash: 4.17.21 + minimist: 1.2.8 + rc: 1.2.8 + + dom-serializer@2.0.0: + dependencies: + 
domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + + domelementtype@2.3.0: {} + + domhandler@5.0.3: + dependencies: + domelementtype: 2.3.0 + + domutils@3.1.0: + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + dotenv@16.4.5: {} + driftless@2.0.3: + dependencies: + present: 0.0.3 + + eastasianwidth@0.2.0: {} + + ecdsa-sig-formatter@1.0.11: + dependencies: + safe-buffer: 5.2.1 + + ejs@3.1.10: + dependencies: + jake: 10.9.2 + electron-to-chromium@1.4.754: {} emittery@0.13.1: {} emoji-regex@8.0.0: {} + emoji-regex@9.2.2: {} + + encoding-sniffer@0.2.0: + dependencies: + iconv-lite: 0.6.3 + whatwg-encoding: 3.1.1 + + encoding@0.1.13: + dependencies: + iconv-lite: 0.6.3 + optional: true + + end-of-stream@1.4.4: + dependencies: + once: 1.4.0 + + engine.io-client@6.5.4: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + engine.io-parser: 5.2.3 + ws: 8.17.1 + xmlhttprequest-ssl: 2.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + engine.io-parser@5.2.3: {} + + enhanced-resolve@5.17.1: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.2.1 + + ensure-posix-path@1.1.1: {} + + entities@4.5.0: {} + + env-paths@2.2.1: {} + + err-code@2.0.3: {} + error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -2258,16 +7574,50 @@ snapshots: es-errors@1.3.0: {} + esbuild-wasm@0.19.12: {} + escalade@3.1.2: {} escape-string-regexp@1.0.5: {} escape-string-regexp@2.0.0: {} + escape-string-regexp@4.0.0: {} + + escodegen@2.1.0: + dependencies: + esprima: 4.0.1 + estraverse: 5.3.0 + esutils: 2.0.3 + optionalDependencies: + source-map: 0.6.1 + + eslint-visitor-keys@3.4.3: {} + + espree@9.6.1: + dependencies: + acorn: 8.12.1 + acorn-jsx: 5.3.2(acorn@8.12.1) + eslint-visitor-keys: 3.4.3 + esprima@4.0.1: {} + estraverse@5.3.0: {} + + esutils@2.0.3: {} + event-target-shim@5.0.1: {} + eventemitter3@1.1.1: {} + + eventemitter3@4.0.7: {} + + events-to-array@2.0.3: {} + + events@1.1.1: {} + + events@3.3.0: {} + execa@5.1.1: dependencies: cross-spawn: 7.0.3 @@ -2290,25 +7640,105 @@ snapshots: jest-message-util: 29.7.0 jest-util: 29.7.0 + exponential-backoff@3.1.1: {} + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.2: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + fast-json-stable-stringify@2.1.0: {} + fast-levenshtein@3.0.0: + dependencies: + fastest-levenshtein: 1.0.16 + fast-safe-stringify@2.1.1: {} + fast-xml-parser@4.4.1: + dependencies: + strnum: 1.0.5 + + fastest-levenshtein@1.0.16: {} + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + fb-watchman@2.0.2: dependencies: bser: 2.1.1 + fflate@0.8.1: {} + + file-uri-to-path@1.0.0: + optional: true + + filelist@1.0.4: + dependencies: + minimatch: 5.1.6 + + filing-cabinet@4.2.0: + dependencies: + app-module-path: 2.2.0 + commander: 10.0.1 + enhanced-resolve: 5.17.1 + is-relative-path: 1.0.2 + module-definition: 5.0.1 + module-lookup-amd: 8.0.5 + resolve: 1.22.8 + resolve-dependency-path: 3.0.2 + sass-lookup: 5.0.1 + stylus-lookup: 5.0.1 + tsconfig-paths: 4.2.0 + typescript: 5.4.5 + fill-range@7.0.1: dependencies: to-regex-range: 5.0.1 + filtrex@0.5.4: {} + + filtrex@2.2.3: {} + find-up@4.1.0: dependencies: locate-path: 5.0.0 path-exists: 4.0.0 + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + follow-redirects@1.15.6(debug@4.3.4): + optionalDependencies: + debug: 4.3.4(supports-color@8.1.1) + + for-each@0.3.3: + 
dependencies: + is-callable: 1.2.7 + + foreground-child@3.3.0: + dependencies: + cross-spawn: 7.0.3 + signal-exit: 4.1.0 + form-data-encoder@1.7.2: {} + form-data@3.0.1: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + form-data@4.0.0: dependencies: asynckit: 0.4.0 @@ -2326,6 +7756,24 @@ snapshots: hexoid: 1.0.0 once: 1.4.0 + fromentries@1.3.2: {} + + fs-constants@1.0.0: {} + + fs-extra@10.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.1.0 + universalify: 2.0.1 + + fs-minipass@2.1.0: + dependencies: + minipass: 3.3.6 + + fs-minipass@3.0.3: + dependencies: + minipass: 7.1.2 + fs.realpath@1.0.0: {} fsevents@2.3.2: @@ -2336,8 +7784,15 @@ snapshots: function-bind@1.1.2: {} + function-loop@4.0.0: {} + gensync@1.0.0-beta.2: {} + get-amd-module-type@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + get-caller-file@2.0.5: {} get-intrinsic@1.2.4: @@ -2348,10 +7803,29 @@ snapshots: has-symbols: 1.0.3 hasown: 2.0.2 + get-own-enumerable-property-symbols@3.0.2: {} + get-package-type@0.1.0: {} + get-stream@5.2.0: + dependencies: + pump: 3.0.0 + get-stream@6.0.1: {} + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob@10.4.5: + dependencies: + foreground-child: 3.3.0 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.0 + path-scurry: 1.11.1 + glob@7.2.3: dependencies: fs.realpath: 1.0.0 @@ -2363,10 +7837,39 @@ snapshots: globals@11.12.0: {} + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.2 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + gonzales-pe@4.3.0: + dependencies: + minimist: 1.2.8 + + google-protobuf@3.6.1: {} + gopd@1.0.1: dependencies: get-intrinsic: 1.2.4 + got@11.8.6: + dependencies: + '@sindresorhus/is': 4.6.0 + '@szmarczak/http-timer': 4.0.6 + '@types/cacheable-request': 6.0.3 + '@types/responselike': 1.0.3 + cacheable-lookup: 5.0.4 + cacheable-request: 7.0.4 + decompress-response: 6.0.0 + http2-wrapper: 1.0.3 + lowercase-keys: 2.0.0 + p-cancelable: 2.1.1 + responselike: 2.0.1 + graceful-fs@4.2.11: {} has-flag@3.0.0: {} @@ -2381,20 +7884,94 @@ snapshots: has-symbols@1.0.3: {} + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.0.3 + hasown@2.0.2: dependencies: function-bind: 1.1.2 + hex2dec@1.0.1: {} + hexoid@1.0.0: {} + hosted-git-info@7.0.2: + dependencies: + lru-cache: 10.4.3 + + hot-shots@6.8.7: + optionalDependencies: + unix-dgram: 2.0.6 + + hpagent@0.1.2: {} + html-escaper@2.0.2: {} + htmlparser2@9.1.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.1.0 + entities: 4.5.0 + + http-cache-semantics@4.1.1: {} + + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + http2-wrapper@1.0.3: + dependencies: + quick-lru: 5.1.1 + resolve-alpn: 1.2.1 + + https-proxy-agent@5.0.0: + dependencies: + agent-base: 6.0.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + https-proxy-agent@5.0.1: + dependencies: + agent-base: 6.0.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.5: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + human-signals@2.1.0: {} humanize-ms@1.2.1: dependencies: ms: 2.1.3 + hyperlinker@1.0.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.1.13: {} + + ieee754@1.2.1: {} + + ignore-walk@6.0.5: + 
dependencies: + minimatch: 9.0.5 + + ignore@5.3.2: {} + import-local@3.1.0: dependencies: pkg-dir: 4.2.0 @@ -2402,6 +7979,10 @@ snapshots: imurmurhash@0.1.4: {} + indent-string@4.0.0: {} + + indent-string@5.0.0: {} + inflight@1.0.6: dependencies: once: 1.4.0 @@ -2409,22 +7990,134 @@ snapshots: inherits@2.0.4: {} + ini@1.3.8: {} + + ini@4.1.3: {} + + ink@4.4.1(react@18.3.1): + dependencies: + '@alcalzone/ansi-tokenize': 0.1.3 + ansi-escapes: 6.2.1 + auto-bind: 5.0.1 + chalk: 5.3.0 + cli-boxes: 3.0.0 + cli-cursor: 4.0.0 + cli-truncate: 3.1.0 + code-excerpt: 4.0.0 + indent-string: 5.0.0 + is-ci: 3.0.1 + is-lower-case: 2.0.2 + is-upper-case: 2.0.2 + lodash: 4.17.21 + patch-console: 2.0.0 + react: 18.3.1 + react-reconciler: 0.29.2(react@18.3.1) + scheduler: 0.23.2 + signal-exit: 3.0.7 + slice-ansi: 6.0.0 + stack-utils: 2.0.6 + string-width: 5.1.2 + type-fest: 0.12.0 + widest-line: 4.0.1 + wrap-ansi: 8.1.0 + ws: 8.17.1 + yoga-wasm-web: 0.3.3 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + ip-address@9.0.5: + dependencies: + jsbn: 1.1.0 + sprintf-js: 1.1.3 + + is-actual-promise@1.0.2: {} + + is-arguments@1.1.1: + dependencies: + call-bind: 1.0.7 + has-tostringtag: 1.0.2 + is-arrayish@0.2.1: {} + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-callable@1.2.7: {} + + is-ci@3.0.1: + dependencies: + ci-info: 3.9.0 + is-core-module@2.13.1: dependencies: hasown: 2.0.2 + is-docker@2.2.1: {} + + is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-fullwidth-code-point@4.0.0: {} + is-generator-fn@2.1.0: {} + is-generator-function@1.0.10: + dependencies: + has-tostringtag: 1.0.2 + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-interactive@1.0.0: {} + + is-lambda@1.0.1: {} + + is-lower-case@2.0.2: + dependencies: + tslib: 2.6.3 + is-number@7.0.0: {} + is-obj@1.0.1: {} + + is-plain-object@5.0.0: {} + + is-regexp@1.0.0: {} + + is-relative-path@1.0.2: {} + is-stream@2.0.1: {} + is-typed-array@1.1.13: + dependencies: + which-typed-array: 1.1.15 + + is-upper-case@2.0.2: + dependencies: + tslib: 2.6.3 + + is-url-superb@4.0.0: {} + + is-url@1.2.4: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + + isarray@1.0.0: {} + isexe@2.0.0: {} + isexe@3.1.1: {} + + isomorphic-ws@4.0.1(ws@5.2.4): + dependencies: + ws: 5.2.4 + istanbul-lib-coverage@3.2.2: {} istanbul-lib-instrument@5.2.1: @@ -2455,7 +8148,7 @@ snapshots: istanbul-lib-source-maps@4.0.1: dependencies: - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) istanbul-lib-coverage: 3.2.2 source-map: 0.6.1 transitivePeerDependencies: @@ -2466,6 +8159,19 @@ snapshots: html-escaper: 2.0.2 istanbul-lib-report: 3.0.1 + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jake@10.9.2: + dependencies: + async: 3.2.5 + chalk: 4.1.2 + filelist: 1.0.4 + minimatch: 3.1.2 + jest-changed-files@29.7.0: dependencies: execa: 5.1.1 @@ -2498,16 +8204,16 @@ snapshots: - babel-plugin-macros - supports-color - jest-cli@29.7.0(@types/node@20.14.9): + jest-cli@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: - '@jest/core': 29.7.0 + '@jest/core': 29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 chalk: 4.1.2 - create-jest: 29.7.0(@types/node@20.14.9) + create-jest: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) exit: 0.1.2 import-local: 3.1.0 - jest-config: 29.7.0(@types/node@20.14.9) + jest-config: 
29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -2517,7 +8223,7 @@ snapshots: - supports-color - ts-node - jest-config@29.7.0(@types/node@20.14.9): + jest-config@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: '@babel/core': 7.24.5 '@jest/test-sequencer': 29.7.0 @@ -2543,6 +8249,7 @@ snapshots: strip-json-comments: 3.1.1 optionalDependencies: '@types/node': 20.14.9 + ts-node: 10.9.2(@types/node@20.14.9)(typescript@5.4.5) transitivePeerDependencies: - babel-plugin-macros - supports-color @@ -2762,18 +8469,28 @@ snapshots: merge-stream: 2.0.0 supports-color: 8.1.1 - jest@29.7.0(@types/node@20.14.9): + jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)): dependencies: - '@jest/core': 29.7.0 + '@jest/core': 29.7.0(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) '@jest/types': 29.6.3 import-local: 3.1.0 - jest-cli: 29.7.0(@types/node@20.14.9) + jest-cli: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) transitivePeerDependencies: - '@types/node' - babel-plugin-macros - supports-color - ts-node + jmespath@0.16.0: {} + + joi@17.13.3: + dependencies: + '@hapi/hoek': 9.3.0 + '@hapi/topo': 5.1.0 + '@sideway/address': 4.1.5 + '@sideway/formula': 3.0.1 + '@sideway/pinpoint': 2.0.0 + js-tokens@4.0.0: {} js-yaml@3.14.1: @@ -2781,24 +8498,146 @@ snapshots: argparse: 1.0.10 esprima: 4.0.1 + jsbn@1.1.0: {} + jsesc@2.5.2: {} + json-bigint@1.0.0: + dependencies: + bignumber.js: 9.1.2 + + json-buffer@3.0.1: {} + json-parse-even-better-errors@2.3.1: {} + json-parse-even-better-errors@3.0.2: {} + json5@2.2.3: {} + jsonfile@6.1.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + jsonparse@1.3.1: {} + + jsonpath-plus@7.2.0: {} + + jsonwebtoken@9.0.2: + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.6.2 + + jwa@1.4.1: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jwa@2.0.0: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jws@3.2.2: + dependencies: + jwa: 1.4.1 + safe-buffer: 5.2.1 + + jws@4.0.0: + dependencies: + jwa: 2.0.0 + safe-buffer: 5.2.1 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + kleur@3.0.3: {} + lazystream@1.0.1: + dependencies: + readable-stream: 2.3.8 + leven@3.1.0: {} + lightstep-tracer@0.31.2: + dependencies: + async: 1.5.0 + eventemitter3: 1.1.1 + google-protobuf: 3.6.1 + hex2dec: 1.0.1 + opentracing: 0.14.7 + source-map-support: 0.3.3 + thrift: 0.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + lines-and-columns@1.2.4: {} locate-path@5.0.0: dependencies: p-locate: 4.1.0 + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.camelcase@4.3.0: {} + + lodash.defaults@4.2.0: {} + + lodash.difference@4.5.0: {} + + lodash.flatten@4.4.0: {} + + lodash.includes@4.3.0: {} + + lodash.isboolean@3.0.3: {} + + lodash.isinteger@4.0.4: {} + + lodash.isnumber@3.0.3: {} + + lodash.isplainobject@4.0.6: {} + + lodash.isstring@4.0.1: {} + lodash.memoize@4.1.2: {} + lodash.merge@4.6.2: {} + + lodash.once@4.1.1: {} + + lodash.union@4.6.0: {} + + lodash@4.17.21: {} + + log-symbols@3.0.0: + dependencies: + chalk: 2.4.2 + + 
long@5.2.3: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + lowercase-keys@2.0.0: {} + + lru-cache@10.4.3: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -2809,12 +8648,35 @@ snapshots: make-error@1.3.6: {} + make-fetch-happen@13.0.1: + dependencies: + '@npmcli/agent': 2.2.2 + cacache: 18.0.4 + http-cache-semantics: 4.1.1 + is-lambda: 1.0.1 + minipass: 7.1.2 + minipass-fetch: 3.0.5 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + negotiator: 0.6.3 + proc-log: 4.2.0 + promise-retry: 2.0.1 + ssri: 10.0.6 + transitivePeerDependencies: + - supports-color + makeerror@1.0.12: dependencies: tmpl: 1.0.5 + matcher-collection@1.1.2: + dependencies: + minimatch: 3.1.2 + merge-stream@2.0.0: {} + merge2@1.4.1: {} + methods@1.1.2: {} micromatch@4.0.5: @@ -2832,34 +8694,213 @@ snapshots: mimic-fn@2.1.0: {} + mimic-response@1.0.1: {} + + mimic-response@3.1.0: {} + minimatch@3.1.2: dependencies: brace-expansion: 1.1.11 + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.1 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.1 + + minimist@1.2.8: {} + + minipass-collect@2.0.1: + dependencies: + minipass: 7.1.2 + + minipass-fetch@3.0.5: + dependencies: + minipass: 7.1.2 + minipass-sized: 1.0.3 + minizlib: 2.1.2 + optionalDependencies: + encoding: 0.1.13 + + minipass-flush@1.0.5: + dependencies: + minipass: 3.3.6 + + minipass-json-stream@1.0.2: + dependencies: + jsonparse: 1.3.1 + minipass: 3.3.6 + + minipass-pipeline@1.2.4: + dependencies: + minipass: 3.3.6 + + minipass-sized@1.0.3: + dependencies: + minipass: 3.3.6 + + minipass@3.3.6: + dependencies: + yallist: 4.0.0 + + minipass@5.0.0: {} + + minipass@7.1.2: {} + + minizlib@2.1.2: + dependencies: + minipass: 3.3.6 + yallist: 4.0.0 + + mixpanel@0.13.0: + dependencies: + https-proxy-agent: 5.0.0 + transitivePeerDependencies: + - supports-color + + mkdirp@0.5.6: + dependencies: + minimist: 1.2.8 + + mkdirp@1.0.4: {} + + mkdirp@3.0.1: {} + + module-definition@5.0.1: + dependencies: + ast-module-types: 5.0.0 + node-source-walk: 6.0.2 + + module-lookup-amd@8.0.5: + dependencies: + commander: 10.0.1 + glob: 7.2.3 + requirejs: 2.3.7 + requirejs-config-file: 4.0.0 + + moment@2.30.1: {} + + ms@2.0.0: {} + ms@2.1.2: {} ms@2.1.3: {} + mute-stream@0.0.8: {} + + nan@2.20.0: + optional: true + + nanoid@3.3.7: {} + + nanotimer@0.3.14: {} + natural-compare@1.4.0: {} + natural-orderby@2.0.3: {} + + negotiator@0.6.3: {} + node-domexception@1.0.0: {} - node-fetch@2.7.0: + node-fetch@2.7.0(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 + optionalDependencies: + encoding: 0.1.13 + + node-gyp@10.2.0: + dependencies: + env-paths: 2.2.1 + exponential-backoff: 3.1.1 + glob: 10.4.5 + graceful-fs: 4.2.11 + make-fetch-happen: 13.0.1 + nopt: 7.2.1 + proc-log: 4.2.0 + semver: 7.6.2 + tar: 6.2.1 + which: 4.0.0 + transitivePeerDependencies: + - supports-color node-int64@0.4.0: {} node-releases@2.0.14: {} + node-source-walk@6.0.2: + dependencies: + '@babel/parser': 7.24.5 + + nopt@7.2.1: + dependencies: + abbrev: 2.0.0 + + normalize-package-data@6.0.2: + dependencies: + hosted-git-info: 7.0.2 + semver: 7.6.2 + validate-npm-package-license: 3.0.4 + normalize-path@3.0.0: {} + normalize-url@6.1.0: {} + + npm-bundled@3.0.1: + dependencies: + npm-normalize-package-bin: 3.0.1 + + npm-install-checks@6.3.0: + dependencies: + semver: 7.6.2 + + npm-normalize-package-bin@3.0.1: {} + + npm-package-arg@11.0.3: + dependencies: + hosted-git-info: 7.0.2 + proc-log: 4.2.0 + semver: 7.6.2 + validate-npm-package-name: 5.0.1 + + npm-packlist@8.0.2: + dependencies: 
+ ignore-walk: 6.0.5 + + npm-pick-manifest@9.1.0: + dependencies: + npm-install-checks: 6.3.0 + npm-normalize-package-bin: 3.0.1 + npm-package-arg: 11.0.3 + semver: 7.6.2 + + npm-registry-fetch@16.2.1: + dependencies: + '@npmcli/redact': 1.1.0 + make-fetch-happen: 13.0.1 + minipass: 7.1.2 + minipass-fetch: 3.0.5 + minipass-json-stream: 1.0.2 + minizlib: 2.1.2 + npm-package-arg: 11.0.3 + proc-log: 4.2.0 + transitivePeerDependencies: + - supports-color + npm-run-path@4.0.1: dependencies: path-key: 3.1.1 + nth-check@2.1.1: + dependencies: + boolbase: 1.0.0 + object-inspect@1.13.1: {} + object-treeify@1.1.33: {} + once@1.4.0: dependencies: wrappy: 1.0.2 @@ -2868,7 +8909,13 @@ snapshots: dependencies: mimic-fn: 2.1.0 - openai@4.52.2: + open@8.4.2: + dependencies: + define-lazy-prop: 2.0.0 + is-docker: 2.2.1 + is-wsl: 2.2.0 + + openai@4.52.2(encoding@0.1.13): dependencies: '@types/node': 18.19.39 '@types/node-fetch': 2.6.11 @@ -2876,11 +8923,28 @@ snapshots: agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 - node-fetch: 2.7.0 + node-fetch: 2.7.0(encoding@0.1.13) web-streams-polyfill: 3.3.3 transitivePeerDependencies: - encoding + opener@1.5.2: {} + + opentracing@0.14.7: {} + + ora@4.1.1: + dependencies: + chalk: 3.0.0 + cli-cursor: 3.1.0 + cli-spinners: 2.9.2 + is-interactive: 1.0.0 + log-symbols: 3.0.0 + mute-stream: 0.0.8 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + + p-cancelable@2.1.1: {} + p-limit@2.3.0: dependencies: p-try: 2.2.0 @@ -2893,8 +8957,42 @@ snapshots: dependencies: p-limit: 2.3.0 + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-map@4.0.0: + dependencies: + aggregate-error: 3.1.0 + p-try@2.2.0: {} + package-json-from-dist@1.0.0: {} + + pacote@17.0.7: + dependencies: + '@npmcli/git': 5.0.8 + '@npmcli/installed-package-contents': 2.1.0 + '@npmcli/promise-spawn': 7.0.2 + '@npmcli/run-script': 7.0.4 + cacache: 18.0.4 + fs-minipass: 3.0.3 + minipass: 7.1.2 + npm-package-arg: 11.0.3 + npm-packlist: 8.0.2 + npm-pick-manifest: 9.1.0 + npm-registry-fetch: 16.2.1 + proc-log: 4.2.0 + promise-retry: 2.0.1 + read-package-json: 7.0.1 + read-package-json-fast: 3.0.2 + sigstore: 2.3.1 + ssri: 10.0.6 + tar: 6.2.1 + transitivePeerDependencies: + - bluebird + - supports-color + parse-json@5.2.0: dependencies: '@babel/code-frame': 7.24.2 @@ -2902,6 +9000,26 @@ snapshots: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 + parse5-htmlparser2-tree-adapter@7.0.0: + dependencies: + domhandler: 5.0.3 + parse5: 7.1.2 + + parse5-parser-stream@7.1.2: + dependencies: + parse5: 7.1.2 + + parse5@7.1.2: + dependencies: + entities: 4.5.0 + + password-prompt@1.1.3: + dependencies: + ansi-escapes: 4.3.2 + cross-spawn: 7.0.3 + + patch-console@2.0.0: {} + path-exists@4.0.0: {} path-is-absolute@1.0.1: {} @@ -2910,8 +9028,17 @@ snapshots: path-parse@1.0.7: {} + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + path-type@4.0.0: {} + picocolors@1.0.0: {} + picocolors@1.0.1: {} + picomatch@2.3.1: {} pirates@4.0.6: {} @@ -2922,39 +9049,239 @@ snapshots: playwright-core@1.45.0: {} + playwright-core@1.45.3: {} + playwright@1.45.0: dependencies: playwright-core: 1.45.0 optionalDependencies: fsevents: 2.3.2 + playwright@1.45.3: + dependencies: + playwright-core: 1.45.3 + optionalDependencies: + fsevents: 2.3.2 + + polite-json@4.0.1: {} + + polite-json@5.0.0: {} + + possible-typed-array-names@1.0.0: {} + + postcss-values-parser@6.0.2(postcss@8.4.41): + dependencies: + color-name: 1.1.4 + is-url-superb: 4.0.0 + postcss: 8.4.41 + quote-unquote: 1.0.0 + + 
postcss@8.4.41: + dependencies: + nanoid: 3.3.7 + picocolors: 1.0.1 + source-map-js: 1.2.0 + + posthog-node@2.6.0(debug@4.3.4): + dependencies: + axios: 0.27.2(debug@4.3.4) + transitivePeerDependencies: + - debug + + precinct@11.0.5: + dependencies: + '@dependents/detective-less': 4.1.0 + commander: 10.0.1 + detective-amd: 5.0.2 + detective-cjs: 5.0.1 + detective-es6: 4.0.1 + detective-postcss: 6.1.3 + detective-sass: 5.0.3 + detective-scss: 4.0.3 + detective-stylus: 4.0.0 + detective-typescript: 11.2.0 + module-definition: 5.0.1 + node-source-walk: 6.0.2 + transitivePeerDependencies: + - supports-color + + present@0.0.3: {} + pretty-format@29.7.0: dependencies: '@jest/schemas': 29.6.3 ansi-styles: 5.2.0 react-is: 18.3.1 + prismjs-terminal@1.2.3: + dependencies: + chalk: 5.3.0 + prismjs: 1.29.0 + string-length: 6.0.0 + + prismjs@1.29.0: {} + + proc-log@4.2.0: {} + + process-nextick-args@2.0.1: {} + + process-on-spawn@1.0.0: + dependencies: + fromentries: 1.3.2 + + prom-client@14.2.0: + dependencies: + tdigest: 0.1.2 + + promise-inflight@1.0.1: {} + + promise-retry@2.0.1: + dependencies: + err-code: 2.0.3 + retry: 0.12.0 + prompts@2.4.2: dependencies: kleur: 3.0.3 sisteransi: 1.0.5 + protobufjs@7.3.2: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 20.14.9 + long: 5.2.3 + + pump@3.0.0: + dependencies: + end-of-stream: 1.4.4 + once: 1.4.0 + + punycode@1.3.2: {} + pure-rand@6.1.0: {} + q@1.5.1: {} + qs@6.12.1: dependencies: side-channel: 1.0.6 + querystring@0.2.0: {} + + queue-microtask@1.2.3: {} + + quick-lru@5.1.1: {} + + quote-unquote@1.0.0: {} + + rc@1.2.8: + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + + react-dom@18.3.1(react@18.3.1): + dependencies: + loose-envify: 1.4.0 + react: 18.3.1 + scheduler: 0.23.2 + + react-element-to-jsx-string@15.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + '@base2/pretty-print-object': 1.0.1 + is-plain-object: 5.0.0 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-is: 18.1.0 + + react-is@18.1.0: {} + react-is@18.3.1: {} + react-reconciler@0.29.2(react@18.3.1): + dependencies: + loose-envify: 1.4.0 + react: 18.3.1 + scheduler: 0.23.2 + + react@18.3.1: + dependencies: + loose-envify: 1.4.0 + + read-package-json-fast@3.0.2: + dependencies: + json-parse-even-better-errors: 3.0.2 + npm-normalize-package-bin: 3.0.1 + + read-package-json@7.0.1: + dependencies: + glob: 10.4.5 + json-parse-even-better-errors: 3.0.2 + normalize-package-data: 6.0.2 + npm-normalize-package-bin: 3.0.1 + + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdir-glob@1.1.3: + dependencies: + minimatch: 5.1.6 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + redeyed@2.1.1: + dependencies: + esprima: 4.0.1 + require-directory@2.1.1: {} + requirejs-config-file@4.0.0: + dependencies: + esprima: 4.0.1 + stringify-object: 3.3.0 + + requirejs@2.3.7: {} + + resolve-alpn@1.2.1: {} + resolve-cwd@3.0.0: dependencies: resolve-from: 5.0.0 + resolve-dependency-path@3.0.2: 
{} + resolve-from@5.0.0: {} + resolve-import@1.4.6: + dependencies: + glob: 10.4.5 + walk-up-path: 3.0.1 + resolve.exports@2.0.2: {} resolve@1.22.8: @@ -2963,6 +9290,58 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + responselike@2.0.1: + dependencies: + lowercase-keys: 2.0.0 + + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + restore-cursor@4.0.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + retry@0.12.0: {} + + reusify@1.0.4: {} + + rimraf@2.6.3: + dependencies: + glob: 7.2.3 + + rimraf@3.0.2: + dependencies: + glob: 7.2.3 + + rimraf@5.0.10: + dependencies: + glob: 10.4.5 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + sass-lookup@5.0.1: + dependencies: + commander: 10.0.1 + + sax@1.2.1: {} + + scheduler@0.23.2: + dependencies: + loose-envify: 1.4.0 + + seedrandom@3.0.5: {} + semver@6.3.1: {} semver@7.6.2: {} @@ -2991,49 +9370,190 @@ snapshots: signal-exit@3.0.7: {} + signal-exit@4.1.0: {} + + sigstore@2.3.1: + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.2 + '@sigstore/sign': 2.3.2 + '@sigstore/tuf': 2.3.4 + '@sigstore/verify': 1.2.1 + transitivePeerDependencies: + - supports-color + sisteransi@1.0.5: {} slash@3.0.0: {} + slice-ansi@4.0.0: + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + + slice-ansi@5.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + slice-ansi@6.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + smart-buffer@4.2.0: {} + + socket.io-client@4.7.5: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + engine.io-client: 6.5.4 + socket.io-parser: 4.2.4 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + socket.io-parser@4.2.4: + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + socketio-wildcard@2.0.0: {} + + socks-proxy-agent@8.0.4: + dependencies: + agent-base: 7.1.1 + debug: 4.3.4(supports-color@8.1.1) + socks: 2.8.3 + transitivePeerDependencies: + - supports-color + + socks@2.8.3: + dependencies: + ip-address: 9.0.5 + smart-buffer: 4.2.0 + + source-map-js@1.2.0: {} + + source-map-support@0.3.3: + dependencies: + source-map: 0.1.32 + source-map-support@0.5.13: dependencies: buffer-from: 1.1.2 source-map: 0.6.1 + source-map@0.1.32: + dependencies: + amdefine: 1.0.1 + source-map@0.6.1: {} + spdx-correct@3.2.0: + dependencies: + spdx-expression-parse: 3.0.1 + spdx-license-ids: 3.0.18 + + spdx-exceptions@2.5.0: {} + + spdx-expression-parse@3.0.1: + dependencies: + spdx-exceptions: 2.5.0 + spdx-license-ids: 3.0.18 + + spdx-license-ids@3.0.18: {} + sprintf-js@1.0.3: {} + sprintf-js@1.1.3: {} + + sqs-consumer@5.8.0(aws-sdk@2.1674.0): + dependencies: + aws-sdk: 2.1674.0 + debug: 4.3.4(supports-color@8.1.1) + transitivePeerDependencies: + - supports-color + + ssri@10.0.6: + dependencies: + minipass: 7.1.2 + stack-utils@2.0.6: dependencies: escape-string-regexp: 2.0.0 + stoppable@1.1.0: {} + string-length@4.0.2: dependencies: char-regex: 1.0.2 strip-ansi: 6.0.1 + string-length@6.0.0: + dependencies: + strip-ansi: 7.1.0 + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 is-fullwidth-code-point: 3.0.0 strip-ansi: 6.0.1 + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + 
emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + stringify-object@3.3.0: + dependencies: + get-own-enumerable-property-symbols: 3.0.2 + is-obj: 1.0.1 + is-regexp: 1.0.0 + strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.0.1 + + strip-bom@3.0.0: {} + strip-bom@4.0.0: {} strip-final-newline@2.0.0: {} + strip-json-comments@2.0.1: {} + strip-json-comments@3.1.1: {} + strnum@1.0.5: {} + + stylus-lookup@5.0.1: + dependencies: + commander: 10.0.1 + superagent@9.0.2: dependencies: component-emitter: 1.3.1 cookiejar: 2.1.4 - debug: 4.3.4 + debug: 4.3.4(supports-color@8.1.1) fast-safe-stringify: 2.1.1 form-data: 4.0.0 formidable: 3.5.1 @@ -3062,14 +9582,129 @@ snapshots: dependencies: has-flag: 4.0.0 + supports-hyperlinks@2.3.0: + dependencies: + has-flag: 4.0.0 + supports-color: 7.2.0 + supports-preserve-symlinks-flag@1.0.0: {} + sync-content@1.0.2: + dependencies: + glob: 10.4.5 + mkdirp: 3.0.1 + path-scurry: 1.11.1 + rimraf: 5.0.10 + + tap-parser@16.0.1: + dependencies: + events-to-array: 2.0.3 + tap-yaml: 2.2.2 + + tap-yaml@2.2.2: + dependencies: + yaml: 2.5.0 + yaml-types: 0.3.0(yaml@2.5.0) + + tap@19.2.5(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.4.5): + dependencies: + '@tapjs/after': 1.1.31(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/after-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/asserts': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/before': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/before-each': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/chdir': 1.1.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/core': 2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/filter': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/fixture': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/intercept': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/mock': 2.1.6(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/node-serialize': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/run': 2.1.7(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/snapshot': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/spawn': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/stdin': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + '@tapjs/test': 2.2.4(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@tapjs/typescript': 
1.4.13(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/node@20.14.9)(typescript@5.4.5) + '@tapjs/worker': 2.0.8(@tapjs/core@2.1.6(@types/node@20.14.9)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + resolve-import: 1.4.6 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - '@types/react' + - bluebird + - bufferutil + - react + - react-devtools-core + - react-dom + - supports-color + - typescript + - utf-8-validate + + tapable@2.2.1: {} + + tar-stream@2.2.0: + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.4 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + + tar@6.2.1: + dependencies: + chownr: 2.0.0 + fs-minipass: 2.1.0 + minipass: 5.0.0 + minizlib: 2.1.2 + mkdirp: 1.0.4 + yallist: 4.0.0 + + tcompare@7.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + diff: 5.2.0 + react-element-to-jsx-string: 15.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + transitivePeerDependencies: + - react + - react-dom + + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + + temp@0.9.4: + dependencies: + mkdirp: 0.5.6 + rimraf: 2.6.3 + test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 glob: 7.2.3 minimatch: 3.1.2 + thrift@0.14.2: + dependencies: + browser-or-node: 1.3.0 + isomorphic-ws: 4.0.1(ws@5.2.4) + node-int64: 0.4.0 + q: 1.5.1 + ws: 5.2.4 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + tldts-core@6.1.39: {} + + tldts@6.1.39: + dependencies: + tldts-core: 6.1.39 + + tmp@0.2.1: + dependencies: + rimraf: 3.0.2 + tmpl@1.0.5: {} to-fast-properties@2.0.0: {} @@ -3078,13 +9713,21 @@ snapshots: dependencies: is-number: 7.0.0 + tough-cookie@5.0.0-rc.4: + dependencies: + tldts: 6.1.39 + tr46@0.0.3: {} - ts-jest@29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9))(typescript@5.4.5): + trivial-deferred@2.0.0: {} + + try-require@1.2.1: {} + + ts-jest@29.1.5(@babel/core@7.24.5)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.5))(jest@29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)))(typescript@5.4.5): dependencies: bs-logger: 0.2.6 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@20.14.9) + jest: 29.7.0(@types/node@20.14.9)(ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5)) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -3098,51 +9741,201 @@ snapshots: '@jest/types': 29.6.3 babel-jest: 29.7.0(@babel/core@7.24.5) + ts-node@10.9.2(@types/node@20.14.9)(typescript@5.4.5): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.11 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 20.14.9 + acorn: 8.12.1 + acorn-walk: 8.3.3 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.4.5 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + + tsconfig-paths@4.2.0: + dependencies: + json5: 2.2.3 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tshy@1.18.0: + dependencies: + chalk: 5.3.0 + chokidar: 3.6.0 + foreground-child: 3.3.0 + minimatch: 9.0.5 + mkdirp: 3.0.1 + polite-json: 5.0.0 + resolve-import: 1.4.6 + rimraf: 5.0.10 + sync-content: 1.0.2 + typescript: 5.4.5 + walk-up-path: 3.0.1 + + tslib@1.14.1: {} + + tslib@2.6.3: {} + + tsutils@3.21.0(typescript@5.4.5): + dependencies: + tslib: 1.14.1 + typescript: 5.4.5 + + tuf-js@2.2.1: + dependencies: + '@tufjs/models': 2.0.1 + debug: 4.3.4(supports-color@8.1.1) + 
make-fetch-happen: 13.0.1 + transitivePeerDependencies: + - supports-color + type-detect@4.0.8: {} + type-fest@0.12.0: {} + type-fest@0.21.3: {} typescript@5.4.5: {} undici-types@5.26.5: {} + undici@6.19.7: {} + + unique-filename@3.0.0: + dependencies: + unique-slug: 4.0.0 + + unique-slug@4.0.0: + dependencies: + imurmurhash: 0.1.4 + + universalify@2.0.1: {} + + unix-dgram@2.0.6: + dependencies: + bindings: 1.5.0 + nan: 2.20.0 + optional: true + update-browserslist-db@1.0.14(browserslist@4.23.0): dependencies: browserslist: 4.23.0 escalade: 3.1.2 picocolors: 1.0.0 + url@0.10.3: + dependencies: + punycode: 1.3.2 + querystring: 0.2.0 + + util-deprecate@1.0.2: {} + + util@0.12.5: + dependencies: + inherits: 2.0.4 + is-arguments: 1.1.1 + is-generator-function: 1.0.10 + is-typed-array: 1.1.13 + which-typed-array: 1.1.15 + + uuid@8.0.0: {} + + uuid@8.3.2: {} + + uuid@9.0.1: {} + + v8-compile-cache-lib@3.0.1: {} + v8-to-istanbul@9.2.0: dependencies: '@jridgewell/trace-mapping': 0.3.25 '@types/istanbul-lib-coverage': 2.0.6 convert-source-map: 2.0.0 + validate-npm-package-license@3.0.4: + dependencies: + spdx-correct: 3.2.0 + spdx-expression-parse: 3.0.1 + + validate-npm-package-name@5.0.1: {} + + walk-sync@0.2.7: + dependencies: + ensure-posix-path: 1.1.1 + matcher-collection: 1.1.2 + + walk-up-path@3.0.1: {} + walker@1.0.8: dependencies: makeerror: 1.0.12 + wcwidth@1.0.1: + dependencies: + defaults: 1.0.4 + web-streams-polyfill@3.3.3: {} web-streams-polyfill@4.0.0-beta.3: {} webidl-conversions@3.0.1: {} + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + + whatwg-mimetype@4.0.0: {} + whatwg-url@5.0.0: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + which-typed-array@1.1.15: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.2 + which@2.0.2: dependencies: isexe: 2.0.0 + which@4.0.0: + dependencies: + isexe: 3.1.1 + + widest-line@3.1.0: + dependencies: + string-width: 4.2.3 + + widest-line@4.0.1: + dependencies: + string-width: 5.1.2 + + wordwrap@1.0.0: {} + wrap-ansi@7.0.0: dependencies: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + wrappy@1.0.2: {} write-file-atomic@4.0.2: @@ -3150,12 +9943,37 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 3.0.7 + ws@5.2.4: + dependencies: + async-limiter: 1.0.1 + + ws@7.5.10: {} + ws@8.17.1: {} + xml2js@0.6.2: + dependencies: + sax: 1.2.1 + xmlbuilder: 11.0.1 + + xmlbuilder@11.0.1: {} + + xmlhttprequest-ssl@2.0.0: {} + y18n@5.0.8: {} yallist@3.1.1: {} + yallist@4.0.0: {} + + yaml-js@0.2.3: {} + + yaml-types@0.3.0(yaml@2.5.0): + dependencies: + yaml: 2.5.0 + + yaml@2.5.0: {} + yargs-parser@21.1.1: {} yargs@17.7.2: @@ -3168,4 +9986,14 @@ snapshots: y18n: 5.0.8 yargs-parser: 21.1.1 + yn@3.1.1: {} + yocto-queue@0.1.0: {} + + yoga-wasm-web@0.3.3: {} + + zip-stream@4.1.1: + dependencies: + archiver-utils: 3.0.4 + compress-commons: 4.1.2 + readable-stream: 3.6.2 diff --git a/apps/ui/ingestion-ui/README.md b/apps/ui/ingestion-ui/README.md index e6b49b95..61f9f983 100644 --- a/apps/ui/ingestion-ui/README.md +++ b/apps/ui/ingestion-ui/README.md @@ -20,7 +20,7 @@ This template provides an easy way to spin up a UI for Firecrawl using React. It ``` 2. 
Set up your Firecrawl API key: - Open `src/components/FirecrawlComponent.tsx` and replace the placeholder API key: + Open `src/components/ingestion.tsx` and replace the placeholder API key: ```typescript const FIRECRAWL_API_KEY = "your-api-key-here"; @@ -36,7 +36,7 @@ This template provides an easy way to spin up a UI for Firecrawl using React. It ## Customization -The main Firecrawl component is located in `src/components/FirecrawlComponent.tsx`. You can modify this file to customize the UI or add additional features. +The main Firecrawl component is located in `src/components/ingestion.tsx`. You can modify this file to customize the UI or add additional features. ## Security Considerations diff --git a/apps/ui/ingestion-ui/package-lock.json b/apps/ui/ingestion-ui/package-lock.json index 7038a1f2..e48e99b8 100644 --- a/apps/ui/ingestion-ui/package-lock.json +++ b/apps/ui/ingestion-ui/package-lock.json @@ -11,6 +11,7 @@ "@radix-ui/react-checkbox": "^1.1.1", "@radix-ui/react-collapsible": "^1.1.0", "@radix-ui/react-label": "^2.1.0", + "@radix-ui/react-radio-group": "^1.2.0", "@radix-ui/react-slot": "^1.1.0", "class-variance-authority": "^0.7.0", "clsx": "^2.1.1", @@ -1192,6 +1193,32 @@ } } }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.0.tgz", + "integrity": "sha512-GZsZslMJEyo1VKm5L1ZJY8tGDxZNPAoUeQUIbKeJfoi7Q4kmig5AsgLMYYuyYbfjd8fBmFORAIwYAkXMnXZgZw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.0", + "@radix-ui/react-context": "1.1.0", + "@radix-ui/react-primitive": "2.0.0", + "@radix-ui/react-slot": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-compose-refs": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.0.tgz", @@ -1220,6 +1247,21 @@ } } }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.0.tgz", + "integrity": "sha512-BUuBvgThEiAXh2DWu93XsT+a3aWrGqolGlqqw5VU1kG7p/ZH2cuDlM1sRLNnY3QcBS69UIz2mcKhMxDsdewhjg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-id": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.0.tgz", @@ -1304,6 +1346,69 @@ } } }, + "node_modules/@radix-ui/react-radio-group": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.2.0.tgz", + "integrity": "sha512-yv+oiLaicYMBpqgfpSPw6q+RyXlLdIpQWDHZbUKURxe+nEh53hFXPPlfhfQQtYkS5MMK/5IWIa76SksleQZSzw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.0", + "@radix-ui/react-compose-refs": "1.1.0", + "@radix-ui/react-context": "1.1.0", + "@radix-ui/react-direction": "1.1.0", + "@radix-ui/react-presence": "1.1.0", + "@radix-ui/react-primitive": "2.0.0", + "@radix-ui/react-roving-focus": "1.1.0", + "@radix-ui/react-use-controllable-state": "1.1.0", + 
"@radix-ui/react-use-previous": "1.1.0", + "@radix-ui/react-use-size": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.0.tgz", + "integrity": "sha512-EA6AMGeq9AEeQDeSH0aZgG198qkfHSbvWTf1HvoDmOB5bBG/qTxjYMWUKMnYiV6J/iP/J8MEFSuB2zRU2n7ODA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.0", + "@radix-ui/react-collection": "1.1.0", + "@radix-ui/react-compose-refs": "1.1.0", + "@radix-ui/react-context": "1.1.0", + "@radix-ui/react-direction": "1.1.0", + "@radix-ui/react-id": "1.1.0", + "@radix-ui/react-primitive": "2.0.0", + "@radix-ui/react-use-callback-ref": "1.1.0", + "@radix-ui/react-use-controllable-state": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz", diff --git a/apps/ui/ingestion-ui/package.json b/apps/ui/ingestion-ui/package.json index 48009648..01a754b2 100644 --- a/apps/ui/ingestion-ui/package.json +++ b/apps/ui/ingestion-ui/package.json @@ -13,6 +13,7 @@ "@radix-ui/react-checkbox": "^1.1.1", "@radix-ui/react-collapsible": "^1.1.0", "@radix-ui/react-label": "^2.1.0", + "@radix-ui/react-radio-group": "^1.2.0", "@radix-ui/react-slot": "^1.1.0", "class-variance-authority": "^0.7.0", "clsx": "^2.1.1", diff --git a/apps/ui/ingestion-ui/src/App.tsx b/apps/ui/ingestion-ui/src/App.tsx index eb0e6954..b80a5ad8 100644 --- a/apps/ui/ingestion-ui/src/App.tsx +++ b/apps/ui/ingestion-ui/src/App.tsx @@ -1,9 +1,35 @@ +import { useState } from "react"; import FirecrawlComponent from "./components/ingestion"; +import FirecrawlComponentV1 from "./components/ingestionV1"; +import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group"; +import { Label } from "@/components/ui/label"; function App() { + const [selectedComponent, setSelectedComponent] = useState<"v0" | "v1">("v1"); + return ( <> - +
+ setSelectedComponent(value as "v0" | "v1")} + className="flex space-x-6 mt-6" + > +
+ + +
+
+ + +
+
+
+ {selectedComponent === "v1" ? ( + + ) : ( + + )} ); } diff --git a/apps/ui/ingestion-ui/src/components/ingestionV1.tsx b/apps/ui/ingestion-ui/src/components/ingestionV1.tsx new file mode 100644 index 00000000..b34c0d6b --- /dev/null +++ b/apps/ui/ingestion-ui/src/components/ingestionV1.tsx @@ -0,0 +1,603 @@ +import { useState, ChangeEvent, FormEvent, useEffect } from "react"; +import { + Card, + CardHeader, + CardTitle, + CardContent, + CardFooter, +} from "@/components/ui/card"; +import { Input } from "@/components/ui/input"; +import { Button } from "@/components/ui/button"; +import { Checkbox } from "@/components/ui/checkbox"; +import { Label } from "@/components/ui/label"; +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@/components/ui/collapsible"; +import { ChevronDown, ChevronLeft, ChevronRight } from "lucide-react"; + +//! Hardcoded values (not recommended for production) +//! Highly recommended to move all Firecrawl API calls to the backend (e.g. Next.js API route) +const FIRECRAWL_API_URL = "https://api.firecrawl.dev"; // Replace with your actual API URL whether it is local or using Firecrawl Cloud +const FIRECRAWL_API_KEY = "fc-YOUR_API_KEY"; // Replace with your actual API key + +interface FormData { + url: string; + crawlSubPages: boolean; + search: string; + limit: string; + maxDepth: string; + excludePaths: string; + includePaths: string; + extractMainContent: boolean; +} + +interface CrawlerOptions { + includes?: string[]; + excludes?: string[]; + maxDepth?: number; + limit?: number; + returnOnlyUrls: boolean; +} + +interface ScrapeOptions { + formats?: string[]; + onlyMainContent?: boolean; +} + +interface PageOptions { + onlyMainContent: boolean; +} + +interface RequestBody { + url: string; + crawlerOptions?: CrawlerOptions; + pageOptions?: PageOptions; + search?: string; + excludePaths?: string[]; + includePaths?: string[]; + maxDepth?: number; + limit?: number; + scrapeOptions?: ScrapeOptions; + formats?: string[]; +} + +interface ScrapeResultMetadata { + title: string; + description: string; + language: string; + sourceURL: string; + pageStatusCode: number; + pageError?: string; + [key: string]: string | number | undefined; +} + +interface ScrapeResultData { + markdown: string; + content: string; + html: string; + rawHtml: string; + metadata: ScrapeResultMetadata; + llm_extraction: Record; + warning?: string; +} + +interface ScrapeResult { + success: boolean; + data: ScrapeResultData; +} + +export default function FirecrawlComponentV1() { + const [formData, setFormData] = useState({ + url: "", + crawlSubPages: false, + search: "", + limit: "", + maxDepth: "", + excludePaths: "", + includePaths: "", + extractMainContent: false, + }); + const [loading, setLoading] = useState(false); + const [scrapingSelectedLoading, setScrapingSelectedLoading] = + useState(false); + const [crawledUrls, setCrawledUrls] = useState([]); + const [selectedUrls, setSelectedUrls] = useState([]); + const [scrapeResults, setScrapeResults] = useState< + Record + >({}); + const [isCollapsibleOpen, setIsCollapsibleOpen] = useState(true); + const [crawlStatus, setCrawlStatus] = useState<{ + current: number; + total: number | null; + }>({ current: 0, total: null }); + const [elapsedTime, setElapsedTime] = useState(0); + const [showCrawlStatus, setShowCrawlStatus] = useState(false); + const [isScraping, setIsScraping] = useState(false); + const [currentPage, setCurrentPage] = useState(1); + const urlsPerPage = 10; + + useEffect(() => { + let timer: NodeJS.Timeout; + if 
(loading) { + setShowCrawlStatus(true); + timer = setInterval(() => { + setElapsedTime((prevTime) => prevTime + 1); + }, 1000); + } + return () => { + if (timer) clearInterval(timer); + }; + }, [loading]); + + const handleChange = (e: ChangeEvent) => { + const { name, value, type, checked } = e.target; + setFormData((prevData) => { + const newData = { + ...prevData, + [name]: type === "checkbox" ? checked : value, + }; + + // Automatically check "Crawl Sub-pages" if limit or search have content + if (name === "limit" || name === "search") { + newData.crawlSubPages = !!value || !!newData.limit || !!newData.search; + } + + return newData; + }); + }; + + const handleSubmit = async (e: FormEvent) => { + e.preventDefault(); + setLoading(true); + setIsCollapsibleOpen(false); + setElapsedTime(0); + setCrawlStatus({ current: 0, total: null }); + setIsScraping(!formData.crawlSubPages); + setCrawledUrls([]); + setSelectedUrls([]); + setScrapeResults({}); + setScrapingSelectedLoading(false); + setShowCrawlStatus(false); + + try { + const endpoint = `${FIRECRAWL_API_URL}/v1/${ + formData.crawlSubPages ? "map" : "scrape" + }`; + + const requestBody: RequestBody = formData.crawlSubPages + ? { + url: formData.url, + search: formData.search || undefined, + limit: formData.limit ? parseInt(formData.limit) : undefined, + } + : { + url: formData.url, + formats: ["markdown"], + }; + + const response = await fetch(endpoint, { + method: "POST", + headers: { + Authorization: `Bearer ${FIRECRAWL_API_KEY}`, + "Content-Type": "application/json", + }, + body: JSON.stringify(requestBody), + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const data = await response.json(); + if (formData.crawlSubPages) { + if (data.success === true && Array.isArray(data.links)) { + setCrawledUrls(data.links); + setSelectedUrls(data.links); + setCrawlStatus({ + current: data.links.length, + total: data.links.length, + }); + + // Set scrape results with the links + const linkResults: Record = {}; + data.links.forEach((link: string) => { + linkResults[link] = { + success: true, + data: { + metadata: { + sourceURL: link, + title: "", + description: "", + language: "", + pageStatusCode: 200, + }, + markdown: "", + content: "", + html: "", + rawHtml: "", + llm_extraction: {}, + }, + }; + }); + } else { + console.error("Unexpected response format from map endpoint"); + console.log(data); + } + } else { + setScrapeResults({ [formData.url]: data }); + setCrawlStatus({ current: 1, total: 1 }); + } + } catch (error) { + console.error("Error:", error); + setScrapeResults({ + error: { + success: false, + data: { + metadata: { + pageError: "Error occurred while fetching data", + title: "", + description: "", + language: "", + sourceURL: "", + pageStatusCode: 0, + }, + markdown: "", + content: "", + html: "", + rawHtml: "", + llm_extraction: {}, + }, + }, + }); + } finally { + setLoading(false); + } + }; + + const handleScrapeSelected = async () => { + setLoading(true); + setElapsedTime(0); + setCrawlStatus({ current: 0, total: selectedUrls.length }); + setIsScraping(true); + setScrapingSelectedLoading(true); + const newScrapeResults: Record = {}; + + for (const [index, url] of selectedUrls.entries()) { + try { + const response = await fetch(`${FIRECRAWL_API_URL}/v1/scrape`, { + method: "POST", + headers: { + Authorization: `Bearer ${FIRECRAWL_API_KEY}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + url: url, + formats: ["markdown"], + }), + }); + + if (!response.ok) { + throw 
new Error(`HTTP error! status: ${response.status}`); + } + + const data: ScrapeResult = await response.json(); + newScrapeResults[url] = data; + setCrawlStatus((prev) => ({ ...prev, current: index + 1 })); + setScrapeResults({ ...scrapeResults, ...newScrapeResults }); + } catch (error) { + console.error(`Error scraping ${url}:`, error); + newScrapeResults[url] = { + success: false, + data: { + markdown: "", + content: "", + html: "", + rawHtml: "", + metadata: { + title: "", + description: "", + language: "", + sourceURL: url, + pageStatusCode: 0, + pageError: (error as Error).message, + }, + llm_extraction: {}, + }, + }; + } + } + + setLoading(false); + setIsScraping(false); + }; + + const handlePageChange = (newPage: number) => { + setCurrentPage(newPage); + }; + + const paginatedUrls = crawledUrls.slice( + (currentPage - 1) * urlsPerPage, + currentPage * urlsPerPage + ); + + return ( +
+ + + + Extract web content (V1) + + Powered by Firecrawl 🔥 + + +
+ Use this component to quickly give your users the ability to connect + their AI apps to web data with Firecrawl. Learn more in the{" "} + + Firecrawl docs! + +
+
+ +
+
+ + +
+ + + + + +
+ + setFormData((prev) => ({ + ...prev, + crawlSubPages: checked, + })) + } + /> + +
+ +
+
+ + +
+
+ + +
+
+
+
+
+ {showCrawlStatus && ( +
+
+ {!isScraping && + crawledUrls.length > 0 && + !scrapingSelectedLoading && ( + <> + { + if (checked) { + setSelectedUrls([...crawledUrls]); + } else { + setSelectedUrls([]); + } + }} + /> + + + )} +
+
+ {isScraping + ? `Scraped ${crawlStatus.current} page(s) in ${elapsedTime}s` + : `Crawled ${crawlStatus.current} page(s) in ${elapsedTime}s`}
+
+ )} + + {crawledUrls.length > 0 && + !scrapingSelectedLoading && + !isScraping && ( + <> +
    + {paginatedUrls.map((url, index) => ( +
  • + + setSelectedUrls((prev) => + prev.includes(url) + ? prev.filter((u) => u !== url) + : [...prev, url] + ) + } + /> + + {url.length > 70 ? `${url.slice(0, 70)}...` : url} + +
  • + ))} +
+
+ + + Page {currentPage} of{" "} + {Math.ceil(crawledUrls.length / urlsPerPage)} + + +
+ + )} +
+ + {crawledUrls.length > 0 && !scrapingSelectedLoading && ( + + )} + +
+ + {Object.keys(scrapeResults).length > 0 && ( +
+

Scrape Results

+

+ You can do whatever you want with the scrape results. Here is a + basic showcase of the markdown. +

+
+ {Object.entries(scrapeResults).map(([url, result]) => ( + + + {result.data.metadata.title} + + {url + .replace(/^(https?:\/\/)?(www\.)?/, "") + .replace(/\/$/, "")} + + + +
+ {result.success ? ( + <> +
+                          {result.data.markdown.trim()}
+                        
+ + ) : ( + <> +

+ Failed to scrape this URL +

+

+ {result.toString()} +

+ + )} +
+
+
+ ))} +
+
+ )} +
+ ); +} diff --git a/apps/ui/ingestion-ui/src/components/ui/radio-group.tsx b/apps/ui/ingestion-ui/src/components/ui/radio-group.tsx new file mode 100644 index 00000000..43b43b48 --- /dev/null +++ b/apps/ui/ingestion-ui/src/components/ui/radio-group.tsx @@ -0,0 +1,42 @@ +import * as React from "react" +import * as RadioGroupPrimitive from "@radix-ui/react-radio-group" +import { Circle } from "lucide-react" + +import { cn } from "@/lib/utils" + +const RadioGroup = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => { + return ( + + ) +}) +RadioGroup.displayName = RadioGroupPrimitive.Root.displayName + +const RadioGroupItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => { + return ( + + + + + + ) +}) +RadioGroupItem.displayName = RadioGroupPrimitive.Item.displayName + +export { RadioGroup, RadioGroupItem } diff --git a/docker-compose.yaml b/docker-compose.yaml index ffcbc4ee..24b51762 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,5 +1,4 @@ name: firecrawl -version: '3.9' x-common-service: &common-service build: apps/api @@ -16,7 +15,6 @@ x-common-service: &common-service - OPENAI_BASE_URL=${OPENAI_BASE_URL} - MODEL_NAME=${MODEL_NAME:-gpt-4o} - SLACK_WEBHOOK_URL=${SLACK_WEBHOOK_URL} - - SERPER_API_KEY=${SERPER_API_KEY} - LLAMAPARSE_API_KEY=${LLAMAPARSE_API_KEY} - LOGTAIL_KEY=${LOGTAIL_KEY} - BULL_AUTH_KEY=${BULL_AUTH_KEY} diff --git a/examples/find_internal_link_opportunites/find_internal_link_opportunites.ipynb b/examples/find_internal_link_opportunites/find_internal_link_opportunites.ipynb new file mode 100644 index 00000000..d6168878 --- /dev/null +++ b/examples/find_internal_link_opportunites/find_internal_link_opportunites.ipynb @@ -0,0 +1,509 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import datetime\n", + "import time\n", + "from firecrawl import FirecrawlApp\n", + "import json\n", + "import anthropic\n", + "from dotenv import load_dotenv\n", + "\n", + "# Load environment variables\n", + "load_dotenv()\n", + "\n", + "# Retrieve API keys from environment variables\n", + "anthropic_api_key = os.getenv(\"ANTHROPIC_API_KEY\") or \"\"\n", + "firecrawl_api_key = os.getenv(\"FIRECRAWL_API_KEY\") or \"\"\n", + "# Set variables\n", + "blog_url=\"https://mendable.ai/blog\"\n", + "\n", + "# Set up anthropic client\n", + "client = anthropic.Anthropic(\n", + " api_key=anthropic_api_key,\n", + ")\n", + "\n", + "# Initialize the FirecrawlApp with your API key\n", + "app = FirecrawlApp(api_key=firecrawl_api_key)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# Crawl a website\n", + "params = {\n", + " 'crawlOptions': {\n", + " 'limit': 100\n", + " },\n", + " \"pageOptions\": {\n", + " \"onlyMainContent\": True\n", + " }\n", + "}\n", + "crawl_result = app.crawl_url(blog_url, params=params)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting potential links from crawl_result:\n", + "Collected 36 potential links:\n", + "URL: https://mendable.ai/blog/coachgtm-mongodb, Title: Meet MongoDBs CoachGTM.ai\n", + "URL: https://mendable.ai/blog/building-safe-rag, Title: Building Safe RAG systems with the LLM OWASP top 10\n", + "URL: https://mendable.ai/blog/gdpr-repository-pattern, Title: 
Navigating the Maze of GDPR Compliance: A Codebase Transformation\n", + "URL: https://mendable.ai/blog/how-mendable-leverages-langsmith-to-debug-tools-and-actions, Title: How Mendable leverages Langsmith to debug Tools & Actions\n", + "URL: https://mendable.ai/blog/european-data-storage, Title: Launching European Data Storage powered by MongoDB\n", + "URL: https://mendable.ai/blog/tools, Title: Introducing Tools and Actions\n", + "URL: https://mendable.ai/blog/december_update, Title: Mendable.ai December Recap\n", + "URL: https://mendable.ai/blog/november_update, Title: Mendable.ai November Update\n", + "URL: https://mendable.ai/blog/october-recap, Title: Mendable.ai October Recap\n", + "URL: https://mendable.ai/blog/midseptemberupdate, Title: Mendable.ai Mid September 2023 Update\n", + "URL: https://mendable.ai/blog/getting-started, Title: Everything you need to know about Mendable: Build and deploy AI Chat Search\n", + "URL: https://mendable.ai/blog/building-copilots, Title: Building context-aware AI copilots with Mendable\n", + "URL: https://mendable.ai/blog/august2023update, Title: Mendable.ai August 2023 Updates\n", + "URL: https://mendable.ai/blog/finetuning-gpt35, Title: Early Insights Fine-Tuning GPT 3.5 from Mendable.ai\n", + "URL: https://mendable.ai/blog/gpt35prompting, Title: Improving GPT-3.5, Insights from Mendable.ai\n", + "URL: https://mendable.ai/blog/precisemode, Title: Introducing Precise Mode for Mendable.ai\n", + "URL: https://mendable.ai/blog/customprompt, Title: Customizing Your LLM Model on Mendable.ai\n", + "URL: https://mendable.ai/blog/mendable-launch, Title: Introducing Mendable.ai\n", + "URL: https://mendable.ai/blog/european-data-storage, Title: Launching European Data Storage powered by MongoDB\n", + "URL: https://mendable.ai/blog/customprompt, Title: Customizing Your LLM Model on Mendable.ai\n", + "URL: https://mendable.ai/blog/precisemode, Title: Introducing Precise Mode for Mendable.ai\n", + "URL: https://mendable.ai/blog/building-copilots, Title: Building context-aware AI copilots with Mendable\n", + "URL: https://mendable.ai/blog/coachgtm-mongodb, Title: Meet MongoDBs CoachGTM.ai\n", + "URL: https://mendable.ai/blog/building-safe-rag, Title: Building Safe RAG systems with the LLM OWASP top 10\n", + "URL: https://mendable.ai/blog/gdpr-repository-pattern, Title: Navigating the Maze of GDPR Compliance: A Codebase Transformation\n", + "URL: https://mendable.ai/blog/how-mendable-leverages-langsmith-to-debug-tools-and-actions, Title: How Mendable leverages Langsmith to debug Tools & Actions\n", + "URL: https://mendable.ai/blog/tools, Title: Introducing Tools and Actions\n", + "URL: https://mendable.ai/blog/december_update, Title: Mendable.ai December Recap\n", + "URL: https://mendable.ai/blog/november_update, Title: Mendable.ai November Update\n", + "URL: https://mendable.ai/blog/october-recap, Title: Mendable.ai October Recap\n", + "URL: https://mendable.ai/blog/midseptemberupdate, Title: Mendable.ai Mid September 2023 Update\n", + "URL: https://mendable.ai/blog/getting-started, Title: Everything you need to know about Mendable: Build and deploy AI Chat Search\n", + "URL: https://mendable.ai/blog/august2023update, Title: Mendable.ai August 2023 Updates\n", + "URL: https://mendable.ai/blog/finetuning-gpt35, Title: Early Insights Fine-Tuning GPT 3.5 from Mendable.ai\n", + "URL: https://mendable.ai/blog/gpt35prompting, Title: Improving GPT-3.5, Insights from Mendable.ai\n", + "URL: https://mendable.ai/blog/mendable-launch, Title: Introducing Mendable.ai\n" + ] + 
} + ], + "source": [ + "potential_links = []\n", + "\n", + "if crawl_result:\n", + " print(\"Collecting potential links from crawl_result:\")\n", + " \n", + " for item in crawl_result:\n", + " metadata = item[\"metadata\"]\n", + " og_url = metadata.get(\"ogUrl\")\n", + " title = metadata.get(\"title\")\n", + " if og_url and title and og_url != blog_url:\n", + " potential_links.append({\"url\": og_url, \"title\": title})\n", + " \n", + " print(f\"Collected {len(potential_links)} potential links:\")\n", + " for link in potential_links:\n", + " print(f\"URL: {link['url']}, Title: {link['title']}\")\n", + " \n", + "else:\n", + " print(\"crawl_result is empty or None\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Suggestion for: Meet MongoDBs CoachGTM.ai\n", + "Blog phrase: Mendable also provides a Tools\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Meet MongoDBs CoachGTM.ai\n", + "Blog phrase: MongoDB Atlas Vector Search to\n", + "Internal Link: https://mendable.ai/blog/european-data-storage\n", + "---\n", + "\n", + "\n", + "Suggestion for: Meet MongoDBs CoachGTM.ai\n", + "Blog phrase: By harnessing the power of\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building Safe RAG systems with the LLM OWASP top 10\n", + "Blog phrase: Advantages of RAG\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building Safe RAG systems with the LLM OWASP top 10\n", + "Blog phrase: Bring Your Model\n", + "Internal Link: https://mendable.ai/blog/customprompt\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building Safe RAG systems with the LLM OWASP top 10\n", + "Blog phrase: Garbage in, Garbage out\n", + "Internal Link: https://mendable.ai/blog/precisemode\n", + "---\n", + "\n", + "\n", + "Suggestion for: Navigating the Maze of GDPR Compliance: A Codebase Transformation\n", + "Blog phrase: European data storage\n", + "Internal Link: https://mendable.ai/blog/european-data-storage\n", + "---\n", + "\n", + "\n", + "Suggestion for: Navigating the Maze of GDPR Compliance: A Codebase Transformation\n", + "Blog phrase: delivering value\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n", + "Suggestion for: How Mendable leverages Langsmith to debug Tools & Actions\n", + "Blog phrase: introduction of Tools & Actions\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: How Mendable leverages Langsmith to debug Tools & Actions\n", + "Blog phrase: Mendable Tools & Actions\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Launching European Data Storage powered by MongoDB\n", + "Blog phrase: Clean Architecture and Repository pattern\n", + "Internal Link: https://mendable.ai/blog/gdpr-repository-pattern\n", + "---\n", + "\n", + "\n", + "Suggestion for: Launching European Data Storage powered by MongoDB\n", + "Blog phrase: building the best AI Chat\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Launching European Data Storage powered by MongoDB\n", + "Blog phrase: European RAG pipeline, powered by\n", + "Internal Link: https://mendable.ai/blog/building-safe-rag\n", + "---\n", + "\n", + "\n", + "Suggestion for: 
Introducing Tools and Actions\n", + "Blog phrase: augmentation and actions for automation\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Introducing Tools and Actions\n", + "Blog phrase: Mendable provides an API request\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n", + "Suggestion for: Introducing Tools and Actions\n", + "Blog phrase: AI use it when it\n", + "Internal Link: https://mendable.ai/blog/how-mendable-leverages-langsmith-to-debug-tools-and-actions\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai December Recap\n", + "Blog phrase: customizing the model\n", + "Internal Link: https://mendable.ai/blog/customprompt\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai December Recap\n", + "Blog phrase: AI sales copilot\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai December Recap\n", + "Blog phrase: Introducing Tools and Actions\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai November Update\n", + "Blog phrase: Auto syncing data sources\n", + "Internal Link: https://mendable.ai/blog/european-data-storage\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai November Update\n", + "Blog phrase: Chat insights feature\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai November Update\n", + "Blog phrase: Github private repo support\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai October Recap\n", + "Blog phrase: Full Prompt Customization\n", + "Internal Link: https://mendable.ai/blog/customprompt\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai October Recap\n", + "Blog phrase: Expanded Model Support\n", + "Internal Link: https://mendable.ai/blog/gpt35prompting\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai October Recap\n", + "Blog phrase: AI-Powered Documentation Management\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai Mid September 2023 Update\n", + "Blog phrase: new integration templates\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai Mid September 2023 Update\n", + "Blog phrase: Product Copilot feature\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai Mid September 2023 Update\n", + "Blog phrase: Data Exporting\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n", + "Suggestion for: Everything you need to know about Mendable: Build and deploy AI Chat Search\n", + "Blog phrase: robust API\n", + "Internal Link: https://mendable.ai/blog/tools\n", + "---\n", + "\n", + "\n", + "Suggestion for: Everything you need to know about Mendable: Build and deploy AI Chat Search\n", + "Blog phrase: pre-built components\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Everything you need to know about Mendable: Build and deploy AI Chat Search\n", + "Blog phrase: Customizing Your LLM Model\n", + "Internal Link: https://mendable.ai/blog/customprompt\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building context-aware AI 
copilots with Mendable\n", + "Blog phrase: registered on our platform\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building context-aware AI copilots with Mendable\n", + "Blog phrase: dynamic context to the AI\n", + "Internal Link: https://mendable.ai/blog/customprompt\n", + "---\n", + "\n", + "\n", + "Suggestion for: Building context-aware AI copilots with Mendable\n", + "Blog phrase: personalized answers to your users\n", + "Internal Link: https://mendable.ai/blog/precisemode\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai August 2023 Updates\n", + "Blog phrase: Learn more about how to\n", + "Internal Link: https://mendable.ai/blog/precisemode\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai August 2023 Updates\n", + "Blog phrase: Building context-aware AI copilots with\n", + "Internal Link: https://mendable.ai/blog/building-copilots\n", + "---\n", + "\n", + "\n", + "Suggestion for: Mendable.ai August 2023 Updates\n", + "Blog phrase: customizable AI chat components\n", + "Internal Link: https://mendable.ai/blog/getting-started\n", + "---\n", + "\n", + "\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[15], line 57\u001b[0m\n\u001b[1;32m 27\u001b[0m prompt_instructions \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\"\"\u001b[39m\u001b[38;5;124mGiven this blog post from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcurrent_blog_url\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m called \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcurrent_blog_title\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, analyze the following blog content. Identify 0 to 3 of phrases (5 words max) from the inside of the middle of the article that could be linked to other blog posts from the list of potential links provided inside of . 
Return a JSON object structured as follows:\u001b[39m\n\u001b[1;32m 28\u001b[0m \n\u001b[1;32m 29\u001b[0m \u001b[38;5;124m\u001b[39m\u001b[38;5;130;01m{{\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 53\u001b[0m \n\u001b[1;32m 54\u001b[0m \u001b[38;5;124mGO AND ONLY RETURN THE JSON NOTHING ELSE:\u001b[39m\u001b[38;5;124m\"\"\"\u001b[39m\n\u001b[1;32m 56\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 57\u001b[0m message \u001b[38;5;241m=\u001b[39m \u001b[43mclient\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 58\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mclaude-3-5-sonnet-20240620\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 59\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1024\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 60\u001b[0m \u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\n\u001b[1;32m 61\u001b[0m \u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrole\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43muser\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcontent\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mprompt_instructions\u001b[49m\u001b[43m}\u001b[49m\n\u001b[1;32m 62\u001b[0m \u001b[43m \u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 63\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[38;5;66;03m# Extract the JSON string from the TextBlock\u001b[39;00m\n\u001b[1;32m 66\u001b[0m json_string \u001b[38;5;241m=\u001b[39m message\u001b[38;5;241m.\u001b[39mcontent[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mtext\n", + "File \u001b[0;32m~/projects/python_projects/agents_testing/.conda/lib/python3.10/site-packages/anthropic/_utils/_utils.py:277\u001b[0m, in \u001b[0;36mrequired_args..inner..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 275\u001b[0m msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMissing required argument: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mquote(missing[\u001b[38;5;241m0\u001b[39m])\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 276\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(msg)\n\u001b[0;32m--> 277\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/projects/python_projects/agents_testing/.conda/lib/python3.10/site-packages/anthropic/resources/messages.py:904\u001b[0m, in \u001b[0;36mMessages.create\u001b[0;34m(self, max_tokens, messages, model, metadata, stop_sequences, stream, system, temperature, tool_choice, tools, top_k, top_p, extra_headers, extra_query, extra_body, timeout)\u001b[0m\n\u001b[1;32m 870\u001b[0m 
+    "    [Jupyter ANSI-colored traceback frames elided: the interrupted call chain runs anthropic Messages.create -> SyncAPIClient._post/_request -> httpx Client.send -> httpcore ConnectionPool -> HTTP11Connection -> ssl.SSLSocket.recv/read, ending in the KeyboardInterrupt shown below]\n",
+    "
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msuppress_ragged_eofs:\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "import json\n", + "import csv\n", + "\n", + "# Assuming we have the following variables from the previous code:\n", + "# crawl_result, client, potential_links\n", + "\n", + "# Convert potential_links to a JSON string\n", + "potential_links_json = json.dumps(potential_links, indent=2)\n", + "\n", + "# Prepare CSV file\n", + "csv_filename = \"link_suggestions.csv\"\n", + "csv_headers = [\"Source Blog Title\", \"Source Blog URL\", \"Target Phrase\", \"Suggested Link URL\"]\n", + "\n", + "# Write headers to the CSV file\n", + "with open(csv_filename, 'w', newline='', encoding='utf-8') as csvfile:\n", + " csvwriter = csv.writer(csvfile)\n", + " csvwriter.writerow(csv_headers)\n", + "\n", + "# Loop through each blog post content\n", + "for item in crawl_result:\n", + " current_blog_url = item[\"metadata\"].get(\"ogUrl\", \"\")\n", + " if current_blog_url == blog_url:\n", + " continue\n", + " current_blog_content = item[\"content\"]\n", + " current_blog_title = item[\"metadata\"].get(\"title\", \"\")\n", + "\n", + " prompt_instructions = f\"\"\"Given this blog post from {current_blog_url} called '{current_blog_title}', analyze the following blog content. Identify 0 to 3 of phrases (5 words max) from the inside of the middle of the article that could be linked to other blog posts from the list of potential links provided inside of . Return a JSON object structured as follows:\n", + "\n", + " {{\n", + " \"link_suggestions\": [\n", + " {{\n", + " \"target_phrase\": \"the EXACT phrase from the to be linked to one of the links in (5 words max)\",\n", + " \"suggested_link_url\": \"url of the suggested internal link from \",\n", + " }}\n", + " ],\n", + " \"metadata\": {{\n", + " \"source_blog_url\": \"{current_blog_url}\",\n", + " \"source_blog_title\": \"{current_blog_title}\",\n", + " }}\n", + " }}\n", + "\n", + " Ensure that you provide the EXACT phrase from in target_phrase (5 words max) to locate each suggestion in the blog content without using character positions. 
Your target phrases must NOT be a title!\n", + "\n", + " Blog Content:\n", + " \n", + " {current_blog_content}\n", + " \n", + "\n", + " Potential Links:\n", + " \n", + " {potential_links_json}\n", + " \n", + "\n", + " GO AND ONLY RETURN THE JSON NOTHING ELSE:\"\"\"\n", + "\n", + " try:\n", + " message = client.messages.create(\n", + " model=\"claude-3-5-sonnet-20240620\",\n", + " max_tokens=1024,\n", + " messages=[\n", + " {\"role\": \"user\", \"content\": prompt_instructions}\n", + " ]\n", + " )\n", + " \n", + " # Extract the JSON string from the TextBlock\n", + " json_string = message.content[0].text\n", + " \n", + " # Parse the JSON response\n", + " response_json = json.loads(json_string)\n", + " \n", + " # Write suggestions to CSV\n", + " for suggestion in response_json['link_suggestions']:\n", + " print(\"Suggestion for: \" + current_blog_title )\n", + " print(\"Blog phrase: \" + suggestion['target_phrase']) \n", + " print(\"Internal Link: \" + suggestion['suggested_link_url'])\n", + " print(\"---\\n\\n\")\n", + "\n", + " # Open the CSV file in append mode and write the new row\n", + " with open(csv_filename, 'a', newline='', encoding='utf-8') as csvfile:\n", + " csvwriter = csv.writer(csvfile)\n", + " csvwriter.writerow([\n", + " response_json['metadata']['source_blog_title'],\n", + " response_json['metadata']['source_blog_url'],\n", + " suggestion['target_phrase'],\n", + " suggestion['suggested_link_url'],\n", + " ])\n", + " \n", + " except json.JSONDecodeError:\n", + " print(f\"Error parsing JSON response for blog {current_blog_title}\")\n", + " print(\"Raw response:\", message.content)\n", + " except Exception as e:\n", + " print(f\"Error processing blog {current_blog_title}: {str(e)}\")\n", + " \n", + "\n", + "print(f\"Finished processing all blog posts. Results saved to {csv_filename}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kubernetes/cluster-install/secret.yaml b/examples/kubernetes/cluster-install/secret.yaml index 2be96320..6d8eed3b 100644 --- a/examples/kubernetes/cluster-install/secret.yaml +++ b/examples/kubernetes/cluster-install/secret.yaml @@ -6,7 +6,6 @@ type: Opaque data: OPENAI_API_KEY: "" SLACK_WEBHOOK_URL: "" - SERPER_API_KEY: "" LLAMAPARSE_API_KEY: "" LOGTAIL_KEY: "" BULL_AUTH_KEY: "" diff --git a/examples/simple_web_data_extraction_with_claude/simple_web_data_extraction_with_claude.ipynb b/examples/simple_web_data_extraction_with_claude/simple_web_data_extraction_with_claude.ipynb new file mode 100644 index 00000000..ee14f147 --- /dev/null +++ b/examples/simple_web_data_extraction_with_claude/simple_web_data_extraction_with_claude.ipynb @@ -0,0 +1,259 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Web Scraping and Extraction with Firecrawl and Claude\n", + "\n", + "This notebook demonstrates how to use Firecrawl to scrape web content and Claude to extract structured data from it." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 1: Import Required Libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import os\n", + "import json\n", + "from firecrawl import FirecrawlApp\n", + "from anthropic import Anthropic\n", + "from dotenv import load_dotenv\n", + "\n", + "# Load environment variables\n", + "load_dotenv()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 2: Set Up API Keys and URL" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "URL to scrape: https://mendable.ai\n" + ] + } + ], + "source": [ + "# Retrieve API keys from environment variables\n", + "anthropic_api_key = os.getenv(\"ANTHROPIC_API_KEY\")\n", + "firecrawl_api_key = os.getenv(\"FIRECRAWL_API_KEY\")\n", + "\n", + "# Set the URL to scrape\n", + "url = \"https://mendable.ai\" # Replace with the actual URL you want to scrape\n", + "\n", + "print(f\"URL to scrape: {url}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 3: Initialize Firecrawl and Anthropic Clients" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Firecrawl and Anthropic clients initialized.\n" + ] + } + ], + "source": [ + "# Initialize FirecrawlApp and Anthropic client\n", + "firecrawl_app = FirecrawlApp(api_key=firecrawl_api_key)\n", + "anthropic_client = Anthropic(api_key=anthropic_api_key)\n", + "\n", + "print(\"Firecrawl and Anthropic clients initialized.\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 4: Scrape the URL using Firecrawl" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Page content scraped. Length: 16199 characters\n" + ] + } + ], + "source": [ + "# Scrape the URL using Firecrawl\n", + "page_content = firecrawl_app.scrape_url(url, params={\"pageOptions\": {\"onlyMainContent\": True}})\n", + "\n", + "print(f\"Page content scraped. Length: {len(page_content['content'])} characters\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 5: Prepare the Prompt for Claude" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Prompt prepared for Claude.\n" + ] + } + ], + "source": [ + "# Prepare the prompt for Claude\n", + "prompt = f\"\"\"Analyze the following webpage content and extract the following information:\n", + "1. The title of the page\n", + "2. Whether the company is part of Y Combinator (YC)\n", + "3. 
Whether the company/product is open source\n", + "\n", + "Return the information in JSON format with the following schema:\n", + "{{\n", + " \"main_header_title\": string,\n", + " \"is_yc_company\": boolean,\n", + " \"is_open_source\": boolean\n", + "}}\n", + "\n", + "Webpage content:\n", + "{page_content['content']}\n", + "\n", + "Return only the JSON, nothing else.\"\"\"\n", + "\n", + "print(\"Prompt prepared for Claude.\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 6: Query Claude" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Claude response received.\n" + ] + } + ], + "source": [ + "# Query Claude\n", + "response = anthropic_client.messages.create(\n", + " model=\"claude-3-opus-20240229\",\n", + " max_tokens=1000,\n", + " messages=[\n", + " {\"role\": \"user\", \"content\": prompt}\n", + " ]\n", + ")\n", + "\n", + "print(\"Claude response received.\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 7: Parse and Display the Result" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " \"title\": \"Just in time answers for Sales and Support\",\n", + " \"is_yc_company\": true,\n", + " \"is_open_source\": false\n", + "}\n" + ] + } + ], + "source": [ + "# Parse and print the result\n", + "result = json.loads(response.content[0].text)\n", + "print(json.dumps(result, indent=2))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/img/firecrawl_logo.png b/img/firecrawl_logo.png new file mode 100644 index 00000000..bd723222 Binary files /dev/null and b/img/firecrawl_logo.png differ
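
The internal-linking cell above assumes that `crawl_result`, `client`, and `potential_links` were created in earlier cells that are not shown here. As a rough sketch only (the item shape is inferred from how the loop reads `metadata.ogUrl`, `metadata.title`, and `content`, and the helper name is hypothetical), `potential_links` could be derived from the same `crawl_result`:

```python
# Hypothetical sketch: build potential_links from crawl_result.
# Assumes each crawled item has "metadata" (with "ogUrl" and "title"),
# matching how the linking loop above reads the same fields.
def build_potential_links(crawl_result):
    links = []
    for item in crawl_result:
        url = item["metadata"].get("ogUrl", "")
        title = item["metadata"].get("title", "")
        if url and title:
            links.append({"url": url, "title": title})
    return links

potential_links = build_potential_links(crawl_result)
```

Filtering out entries without a URL or title keeps the candidate list clean, which matters because the model is asked to copy `suggested_link_url` values verbatim into the CSV.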
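
Steps 2 through 7 of the extraction notebook can also be collapsed into a single script. The sketch below only restates calls that already appear in the notebook (`FirecrawlApp.scrape_url` with `pageOptions.onlyMainContent`, `anthropic_client.messages.create`, and `json.loads` on `response.content[0].text`); the `JSONDecodeError` guard at the end is an addition for the case where the model returns extra text around the JSON.

```python
import os
import json

from firecrawl import FirecrawlApp
from anthropic import Anthropic
from dotenv import load_dotenv

load_dotenv()

# Clients, as in Steps 2 and 3 of the notebook
firecrawl_app = FirecrawlApp(api_key=os.getenv("FIRECRAWL_API_KEY"))
anthropic_client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))

url = "https://mendable.ai"  # replace with the page you want to analyze

# Step 4: scrape only the main content of the page
page_content = firecrawl_app.scrape_url(url, params={"pageOptions": {"onlyMainContent": True}})

# Step 5: prompt asking for a small, fixed JSON schema
prompt = f"""Analyze the following webpage content and extract:
1. The title of the page
2. Whether the company is part of Y Combinator (YC)
3. Whether the company/product is open source

Return the information in JSON format with the following schema:
{{"main_header_title": string, "is_yc_company": boolean, "is_open_source": boolean}}

Webpage content:
{page_content['content']}

Return only the JSON, nothing else."""

# Step 6: query Claude
response = anthropic_client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1000,
    messages=[{"role": "user", "content": prompt}],
)

# Step 7: parse and display the result; fail loudly if the reply is not pure JSON
try:
    result = json.loads(response.content[0].text)
    print(json.dumps(result, indent=2))
except json.JSONDecodeError:
    print("Response was not valid JSON:", response.content[0].text)
```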