Mirror of https://github.com/webrecorder/browsertrix-crawler.git (synced 2025-10-19 14:33:17 +00:00)

* Add --pageExtraDelay option to add extra delay/wait time after every page (fixes #131)
* Store total page time in 'maxPageTime', include pageExtraDelay
* Rename timeout -> pageLoadTimeout
* Cleanup:
  - store seconds for most interval checks, convert to ms only for API calls, remove most sec<->ms conversions
  - add secondsElapsed() utility function to help check elapsed time
  - clean up comments

Co-authored-by: Ilya Kreymer <ikreymer@gmail.com>
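The secondsElapsed() helper mentioned above supports the seconds-based bookkeeping described in the cleanup: durations are kept in seconds and converted to milliseconds only at the setTimeout boundary. A minimal sketch of that pattern (illustrative only; the import path, the maxPageTime value, and the surrounding page-processing steps are assumptions, not taken from the crawler):

import { secondsElapsed, sleep } from "./timing.js";  // path assumed for this sketch

// hypothetical total time budget for a page, in seconds
// (roughly pageLoadTimeout + behavior time + pageExtraDelay)
const maxPageTime = 120;

const pageStart = Date.now();

// ... load the page and run behaviors here ...

// extra post-page delay; conversion to ms happens only inside sleep()
await sleep(10);

if (secondsElapsed(pageStart) >= maxPageTime) {
  // the page has used up its full time budget
}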
import { logger, errJSON } from "./logger.js";

// resolve after the given number of seconds
export function sleep(seconds) {
  return new Promise(resolve => setTimeout(resolve, seconds * 1000));
}

export function timedRun(promise, seconds, message="Promise timed out", logDetails={}, context="general") {
  // return Promise return value or log error if timeout is reached first
  const timeout = seconds * 1000;

  const rejectPromiseOnTimeout = (timeout) => {
    return new Promise((resolve, reject) => {
      setTimeout(() => (reject("timeout reached")), timeout);
    });
  };

  return Promise.race([promise, rejectPromiseOnTimeout(timeout)])
    .catch((err) => {
      if (err == "timeout reached") {
        logger.error(message, {"seconds": seconds, ...logDetails}, context);
      } else {
        logger.error("Unknown exception", {...errJSON(err), ...logDetails}, context);
      }
    });
}

// seconds elapsed since startTime (an epoch timestamp in milliseconds)
export function secondsElapsed(startTime, nowDate = null) {
  nowDate = nowDate || new Date();

  return (nowDate.getTime() - startTime) / 1000;
}
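A usage sketch for timedRun() (illustrative, not part of this file; loadPage(), url, and the 30-second limit are assumptions): the wrapped promise races against the timeout, and on timeout the error is logged and the returned promise resolves to undefined rather than rejecting.

// wrap a page operation so a hang is logged after 30 seconds instead of blocking the crawl;
// loadPage() is a hypothetical async helper
const result = await timedRun(loadPage(url), 30, "Page load timed out", {url}, "pageLoad");

if (result === undefined) {
  // timed out (or threw); timedRun has already logged the error
}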