fix(scrape,search): handle failed jobs

This commit is contained in:
Gergő Móricz 2024-08-23 18:47:56 +02:00
parent 52a05b8c6e
commit 5ef3926d2a
2 changed files with 25 additions and 7 deletions

View File

@@ -58,9 +58,15 @@ export async function scrapeHelper(
       if (Date.now() >= start + timeout) {
         clearInterval(int);
         reject(new Error("Job wait "));
-      } else if (await job.getState() === "completed") {
-        clearInterval(int);
-        resolve((await getScrapeQueue().getJob(job.id)).returnvalue);
+      } else {
+        const state = await job.getState();
+        if (state === "completed") {
+          clearInterval(int);
+          resolve((await getScrapeQueue().getJob(job.id)).returnvalue);
+        } else if (state === "failed") {
+          clearInterval(int);
+          reject((await getScrapeQueue().getJob(job.id)).failedReason);
+        }
       }
     }, 1000);
   }))[0]
@@ -72,6 +78,12 @@ export async function scrapeHelper(
       error: "Request timed out",
       returnCode: 408,
     }
+  } else if (typeof e === "string" && (e.includes("Error generating completions: ") || e.includes("Invalid schema for function"))) {
+    return {
+      success: false,
+      error: e,
+      returnCode: 500,
+    };
   } else {
     throw e;
   }
@@ -214,6 +226,6 @@ export async function scrapeController(req: Request, res: Response) {
   } catch (error) {
     Sentry.captureException(error);
     Logger.error(error);
-    return res.status(500).json({ error: error.message });
+    return res.status(500).json({ error: typeof error === "string" ? error : (error?.message ?? "Internal Server Error") });
   }
 }

View File

@@ -114,9 +114,15 @@ export async function searchHelper(
       if (Date.now() >= start + 60000) {
         clearInterval(int);
         reject(new Error("Job wait "));
-      } else if (await x.getState() === "completed") {
-        clearInterval(int);
-        resolve((await getScrapeQueue().getJob(x.id)).returnvalue);
+      } else {
+        const state = await x.getState();
+        if (state === "completed") {
+          clearInterval(int);
+          resolve((await getScrapeQueue().getJob(x.id)).returnvalue);
+        } else if (state === "failed") {
+          clearInterval(int);
+          reject((await getScrapeQueue().getJob(x.id)).failedReason);
+        }
       }
     }, 1000);
   })))).map(x => x[0]);