The following snippet works well for me, but it smells... does somebody have a cleaner approach?
// Every 3 s, compare the counts of finished (= successful) and failed requests and stop the process if the ratio is too bad
setInterval(() => {
  const { requestsFinished, requestsFailed } = crawler.stats.state
  if (requestsFailed > requestsFinished + 10) { // more than 10 excess failures: give up
    console.warn(`Too many failed requests, stopping! (${requestsFailed} failed, ${requestsFinished} finished)`)
    process.exit(1)
  }
}, 3000)
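
For comparison, here is a slightly tidier variant of the same idea: the check is wrapped in a small helper so the threshold and interval are configurable, and the timer is cleared once it fires or once the crawl finishes. The helper name `watchFailureRatio` and its options (`maxExcessFailures`, `intervalMs`) are my own invention, not a Crawlee API; the only Crawlee-specific fields used are the same `requestsFinished` / `requestsFailed` from `crawler.stats.state` as in the snippet above.

// Hypothetical helper, not part of Crawlee: wraps the same failure-ratio check.
function watchFailureRatio(crawler, { maxExcessFailures = 10, intervalMs = 3000 } = {}) {
  const timer = setInterval(() => {
    const { requestsFinished, requestsFailed } = crawler.stats.state
    if (requestsFailed > requestsFinished + maxExcessFailures) {
      clearInterval(timer) // stop checking before shutting down
      console.warn(`Too many failed requests (${requestsFailed} failed vs ${requestsFinished} finished), stopping`)
      // Hard stop, same behaviour as the original snippet. A gentler option might be to
      // await crawler.teardown() first (assumption: availability depends on your Crawlee version).
      process.exit(1)
    }
  }, intervalMs)
  // Return a cancel function so the watcher can be removed once the crawl completes normally.
  return () => clearInterval(timer)
}

// Usage sketch:
const stopWatching = watchFailureRatio(crawler, { maxExcessFailures: 10 })
await crawler.run()
stopWatching()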