Merge branch 'main' of https://github.com/mendableai/firecrawl
Some checks are pending
Fly Deploy / Pre-deploy checks (push) Waiting to run
Fly Deploy / Test Suite (push) Blocked by required conditions
Fly Deploy / Python SDK Tests (push) Blocked by required conditions
Fly Deploy / JavaScript SDK Tests (push) Blocked by required conditions
Fly Deploy / Deploy app (push) Blocked by required conditions
Fly Deploy / Build and publish Python SDK (push) Blocked by required conditions
Fly Deploy / Build and publish JavaScript SDK (push) Blocked by required conditions
This commit is contained in:
commit 8a992b1596
@@ -59,12 +59,6 @@ export class ScrapeEvents {
     try {
       const previousLog = (await supabase.from("scrape_events").select().eq("id", logId).single()).data as any;
-
-      if (!previousLog) {
-        Logger.warn("Previous log not found.");
-        return;
-      }
-
       await supabase.from("scrape_events").update({
         content: {
           ...previousLog.content,
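
For context, the code this hunk touches follows a read-merge-write pattern against the scrape_events table: fetch the existing row, spread its JSON content, and write the merged object back. A minimal sketch of that pattern, assuming a configured supabase-js client; mergeEventContent and patch are illustrative names, not from the commit:

import { createClient } from "@supabase/supabase-js";

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_KEY!);

// Sketch: merge new fields into the JSON `content` column without dropping
// what is already stored there.
async function mergeEventContent(logId: number, patch: Record<string, unknown>) {
  // .single() resolves to { data, error } for exactly one matching row.
  const previousLog = (await supabase.from("scrape_events").select().eq("id", logId).single()).data as any;

  await supabase
    .from("scrape_events")
    .update({ content: { ...previousLog?.content, ...patch } })
    .eq("id", logId);
}

With the !previousLog guard removed by this commit, the optional chaining above stands in for the deleted null check.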
@@ -131,7 +131,7 @@ const saveJob = async (job: Job, result: any) => {
   if (error) throw new Error(error.message);
   try {
-    // await job.moveToCompleted(null, false, false);
+    await job.moveToCompleted(null, false, false);
   } catch (error) {
     // I think the job won't exist here anymore
   }
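
The re-enabled call is Bull's Job#moveToCompleted, wrapped in a try/catch because, as the inline comment says, the job may already be gone by the time it runs. A sketch of that defensive shape, with forceComplete as an illustrative name:

import { Job } from "bull";

// Sketch: mark a Bull job completed after its result was persisted elsewhere,
// tolerating the case where the job no longer exists.
async function forceComplete(job: Job): Promise<void> {
  try {
    // Same (returnValue, ignoreLock, notFetch) arguments as in the diff.
    await job.moveToCompleted(null, false, false);
  } catch (error) {
    // The job may already have been removed from the queue; ignore.
  }
}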
@@ -94,7 +94,7 @@ export class WebScraperDataProvider {
     const jobStatus = await job.getState();
     if (jobStatus === "failed") {
       Logger.info(
-        "Job " + job.id + " has failed or has been cancelled by the user. Stopping the job..."
+        "Job has failed or has been cancelled by the user. Stopping the job..."
       );
       return [] as Document[];
     }
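
Only the log message changes here (the job id is dropped from it), but the surrounding guard is the interesting part: the provider polls Bull's job.getState() and aborts with an empty result if the job was failed or cancelled. A sketch of that guard, where doScrape stands in for the provider's real work:

import { Job } from "bull";

// Sketch: stop long-running work early if the backing job was cancelled.
async function scrapeUnlessCancelled(job: Job, doScrape: () => Promise<string[]>): Promise<string[]> {
  const jobStatus = await job.getState(); // e.g. "active", "failed", "completed"
  if (jobStatus === "failed") {
    console.log("Job has failed or has been cancelled by the user. Stopping the job...");
    return [];
  }
  return doScrape();
}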
@@ -14,7 +14,7 @@ export function getWebScraperQueue() {
       maxStalledCount: 10,
     },
     defaultJobOptions:{
-      attempts: 5
+      attempts: 2
     }
   });
   Logger.info("Web scraper queue created");
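
The retry budget drops from 5 attempts to 2 (one retry after the first failure). For context, a sketch of how these options sit in Bull's Queue constructor, assuming a REDIS_URL env var; any option outside this hunk is not shown in the diff:

import Queue from "bull";

const webScraperQueue = new Queue("web-scraper", process.env.REDIS_URL!, {
  settings: {
    maxStalledCount: 10, // tolerate several stall recoveries before failing the job
  },
  defaultJobOptions: {
    attempts: 2, // run each job at most twice (one retry), down from 5
  },
});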
@@ -22,11 +22,6 @@ const wsq = getWebScraperQueue();
 async function processJob(job: Job, done) {
   Logger.info(`🐂 Worker taking job ${job.id}`);
 
-  const lockInterval = setInterval(() => {
-    Logger.info(`🐂 Renewing lock for ${job.id}`);
-    job.extendLock(60000);
-  }, 15000);
-
   try {
     job.progress({
       current: 1,
@@ -67,8 +62,7 @@ async function processJob(job: Job, done) {
       origin: job.data.origin,
     });
     Logger.info(`🐂 Job done ${job.id}`);
-    clearInterval(lockInterval);
-    done(null, null);
+    done(null, data);
   } catch (error) {
     Logger.error(`🐂 Job errored ${job.id} - ${error}`);
     if (await getWebScraperQueue().isPaused(false)) {
@@ -114,9 +108,8 @@ async function processJob(job: Job, done) {
       pageOptions: job.data.pageOptions,
       origin: job.data.origin,
     });
-    clearInterval(lockInterval);
     done(null, data);
     }
   }
 }
 
 wsq.process(
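
Taken together, the worker hunks delete the manual lock-renewal interval (Bull renews job locks on its own while a processor runs) and pass the scrape result to done() so Bull stores it as the job's returnvalue instead of null. A sketch of the simplified processor shape after this commit, with runScraper as a hypothetical stand-in for the real work:

import Queue, { Job, DoneCallback } from "bull";

const wsq = new Queue("web-scraper", process.env.REDIS_URL!);

// Hypothetical stand-in for the actual scraping pipeline.
async function runScraper(data: any): Promise<{ docs: any[] }> {
  return { docs: [] };
}

async function processJob(job: Job, done: DoneCallback) {
  try {
    const data = await runScraper(job.data);
    done(null, data); // success: `data` becomes the job's stored return value
  } catch (error) {
    done(error as Error); // failure: Bull marks the job failed and may retry it
  }
}

wsq.process(processJob);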