dependabot for security checks, fixed crawl test

commit a2cdc520e6
parent 2b36de605a
Author: rafaelsideguide
Date:   2024-07-05 14:49:03 -03:00

3 changed files with 52 additions and 89 deletions

.github/dependabot.yml

@@ -5,11 +5,8 @@ updates:
     directory: "/apps/playwright-service"
     schedule:
       interval: "weekly"
-    groups:
-      prod-deps:
-        dependency-type: "production"
-      dev-deps:
-        dependency-type: "development"
+    open-pull-requests-limit: 0 # Disable version updates
+    security-updates: "all"
     commit-message:
       prefix: "apps/playwright-service"
       include: "scope"
@@ -19,11 +16,8 @@ updates:
     directory: "/apps/python-sdk"
     schedule:
       interval: "weekly"
-    groups:
-      prod-deps:
-        dependency-type: "production"
-      dev-deps:
-        dependency-type: "development"
+    open-pull-requests-limit: 0 # Disable version updates
+    security-updates: "all"
     commit-message:
       prefix: "apps/python-sdk"
       include: "scope"
@@ -33,11 +27,8 @@ updates:
     directory: "/apps/api"
     schedule:
       interval: "weekly"
-    groups:
-      prod-deps:
-        dependency-type: "production"
-      dev-deps:
-        dependency-type: "development"
+    open-pull-requests-limit: 0 # Disable version updates
+    security-updates: "all"
     commit-message:
       prefix: "apps/api"
       include: "scope"
@@ -47,11 +38,8 @@ updates:
     directory: "/apps/test-suite"
     schedule:
       interval: "weekly"
-    groups:
-      prod-deps:
-        dependency-type: "production"
-      dev-deps:
-        dependency-type: "development"
+    open-pull-requests-limit: 0 # Disable version updates
+    security-updates: "all"
     commit-message:
       prefix: "apps/test-suite"
       include: "scope"
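
For reference, here is one complete updates entry as it reads after this change, assembled from the hunks above. This is a sketch: the version line and the package-ecosystem value fall outside the diff context, so both are assumptions. Setting open-pull-requests-limit to 0 is the documented way to stop Dependabot from opening version-update pull requests; security updates are not affected by that limit.

version: 2
updates:
  # Assembled from the "/apps/api" hunk; package-ecosystem is an
  # assumption, since the diff context does not include that line.
  - package-ecosystem: "npm"
    directory: "/apps/api"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 0 # Disable version updates
    security-updates: "all"
    commit-message:
      prefix: "apps/api"
      include: "scope"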

apps/test-suite/data/crawl.json

@@ -45,7 +45,6 @@
     ],
     "notes": "This one should not go backwards, but it does!"
   },
   {
     "website": "https://agentops.ai/blog",
     "expected_min_num_of_pages": 6,
@@ -99,12 +98,10 @@
     "website": "https://firecrawl.dev",
     "expected_min_num_of_pages": 2,
     "expected_crawled_pages": [
-      "https://firecrawl.dev/",
-      "https://firecrawl.dev/pricing"
+      "https://www.firecrawl.dev/",
+      "https://www.firecrawl.dev/pricing"
     ]
   },
   {
     "website": "https://fly.io/docs/gpus/gpu-quickstart",
     "expected_min_num_of_pages": 1,
@@ -119,7 +116,6 @@
     ],
     "notes": "This one should not go backwards, but it does!"
   },
   {
     "website": "https://www.instructables.com/circuits",
     "expected_min_num_of_pages": 12,
@@ -153,26 +149,5 @@
       "https://richmondconfidential.org/2009/10/13/before-napa-there-was-winehaven/",
       "https://richmondconfidential.org/2009/10/13/family-calls-for-end-to-violence-at-memorial-for-slain-woman-friend/"
     ]
-  },
-  {
-    "website": "https://www.boardgamegeek.com",
-    "expected_min_num_of_pages": 15,
-    "expected_crawled_pages": [
-      "https://www.boardgamegeek.com/browse/boardgameartist",
-      "https://www.boardgamegeek.com/browse/boardgamehonor",
-      "https://www.boardgamegeek.com/browse/boardgamepublisher",
-      "https://www.boardgamegeek.com/browse/boardgamepodcast",
-      "https://www.boardgamegeek.com/wiki/page/Index",
-      "https://www.boardgamegeek.com/browse/boardgamecategory",
-      "https://www.boardgamegeek.com/boardgame/random",
-      "https://www.boardgamegeek.com/browse/boardgamemechanic",
-      "https://www.boardgamegeek.com/forums",
-      "https://www.boardgamegeek.com/gonecardboard",
-      "https://www.boardgamegeek.com/browse/boardgameaccessory",
-      "https://www.boardgamegeek.com/browse/boardgamedesigner",
-      "https://www.boardgamegeek.com/",
-      "https://www.boardgamegeek.com/previews",
-      "https://www.boardgamegeek.com/browse/boardgame"
-    ]
   }
 ]
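
Each element of this file drives one iteration of the crawl checkup below. A minimal sketch of the entry shape implied by the data, with an illustrative type name (the real suite may type this differently); expected_not_crawled_pages comes from the second test hunk below, and notes is free-form:

// Hypothetical shape of one entry in this data file; the name
// CrawlTestCase is illustrative only.
interface CrawlTestCase {
  website: string;                       // start URL handed to the crawler
  expected_min_num_of_pages: number;     // lower bound on returned pages
  expected_crawled_pages?: string[];     // URLs that must appear in the result
  expected_not_crawled_pages?: string[]; // URLs that must not appear
  notes?: string;                        // remarks on known quirks
}

// Example taken from the firecrawl.dev entry above:
const sample: CrawlTestCase = {
  website: "https://firecrawl.dev",
  expected_min_num_of_pages: 2,
  expected_crawled_pages: [
    "https://www.firecrawl.dev/",
    "https://www.firecrawl.dev/pricing",
  ],
};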

apps/test-suite/tests/crawl.test.ts

@@ -96,8 +96,8 @@ describe("Crawling Checkup (E2E)", () => {
          website: websiteData.website,
          prompt: 'CRAWL',
          expected_output: `SUCCESS: ${websiteData.expected_crawled_pages}`,
-          actual_output: `FAILURE: ${completedResponse.body.data}`,
-          error: `Expected crawled pages to contain ${websiteData.expected_crawled_pages}, but got ${completedResponse.body.data}`
+          actual_output: `FAILURE: ${completedResponse.body.data.map((d: { url: string }) => d.url)}`,
+          error: `Expected crawled pages to contain ${websiteData.expected_crawled_pages}, but got ${completedResponse.body.data.map((d: { url: string }) => d.url)}`
        });
        console.log('Error: ', errorLog);
        continue;
@@ -109,8 +109,8 @@ describe("Crawling Checkup (E2E)", () => {
          website: websiteData.website,
          prompt: 'CRAWL',
          expected_output: `SUCCESS: ${websiteData.expected_not_crawled_pages}`,
-          actual_output: `FAILURE: ${completedResponse.body.data}`,
-          error: `Expected crawled pages to not contain ${websiteData.expected_not_crawled_pages}, but got ${completedResponse.body.data}`
+          actual_output: `FAILURE: ${completedResponse.body.data.map((d: { url: string }) => d.url)}`,
+          error: `Expected crawled pages to not contain ${websiteData.expected_not_crawled_pages}, but got ${completedResponse.body.data.map((d: { url: string }) => d.url)}`
        });
        console.log('Error: ', errorLog);
        continue;
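
Both hunks make the same fix, and the reason is worth spelling out: interpolating an array of objects into a template literal joins the elements with commas and stringifies each one, and a plain object stringifies to "[object Object]", so the old failure logs never showed which URLs the crawl actually returned. Mapping each document to its url field first restores readable output. A self-contained sketch of the difference, where data stands in for completedResponse.body.data:

// Stand-in for completedResponse.body.data.
const data = [
  { url: "https://www.firecrawl.dev/" },
  { url: "https://www.firecrawl.dev/pricing" },
];

// Before: each element stringifies to "[object Object]".
console.log(`FAILURE: ${data}`);
// FAILURE: [object Object],[object Object]

// After: map to the url field before interpolating.
console.log(`FAILURE: ${data.map((d: { url: string }) => d.url)}`);
// FAILURE: https://www.firecrawl.dev/,https://www.firecrawl.dev/pricing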