Mirror of https://github.com/webrecorder/browsertrix-crawler.git, synced 2025-10-19 14:33:17 +00:00

* additional fixes:
  - use distinct exit codes for a subsequent interrupt (13) and a fatal interrupt (17)
  - if the crawl has been stopped, mark it for final exit so post-crawl tasks still run
  - stopped takes precedence over interrupted: if both, still exit with 0 (and mark for final exit)
  - if no WARCs are found but the crawl was stopped and previous pages were found, don't consider the crawl failed
  - cleanup: remove unused code, rename to gracefulFinishOnInterrupt, separate it from the graceful finish triggered by a crawl stop
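The exit-code handling described above lives in crawler.js, not in the entrypoint file shown below. As a rough illustration only, here is a minimal sketch of that precedence, with hypothetical flag names (stopped, fatal, interrupted) that are assumptions rather than names taken from the actual source:

// Hypothetical sketch of the exit-code precedence described in the commit
// message above; flag names and structure are illustrative, not from crawler.js.
function chooseExitCode({ stopped, fatal, interrupted }) {
  if (stopped) {
    // A stopped crawl still counts as a normal finish, even if an interrupt
    // also arrived: exit 0 and leave the crawl marked for final exit so
    // post-crawl tasks can run.
    return 0;
  }
  if (fatal) {
    // A fatal interrupt gets its own code so orchestrators can tell it apart.
    return 17;
  }
  if (interrupted) {
    // A subsequent (non-fatal) interrupt.
    return 13;
  }
  return 0;
}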
55 lines · 1.3 KiB · JavaScript · Executable file
#!/usr/bin/env -S node --experimental-global-webcrypto

import { logger } from "./util/logger.js";
import { setExitOnRedisError } from "./util/redis.js";
import { Crawler } from "./crawler.js";

var crawler = null;

// Timestamp of the last termination signal, used to debounce repeated signals.
var lastSigInt = 0;
// Set on SIGABRT: the next SIGTERM/SIGINT then exits immediately.
let forceTerm = false;

async function handleTerminate(signame) {
  logger.info(`${signame} received...`);
  if (!crawler || !crawler.crawlState) {
    logger.error("error: no crawler running, exiting");
    process.exit(1);
  }

  if (crawler.done) {
    logger.info("success: crawler done, exiting");
    process.exit(0);
  }

  setExitOnRedisError(true);

  try {
    if (!crawler.interrupted) {
      // First signal: let the crawler gracefully finish the current pages.
      logger.info("SIGNAL: gracefully finishing current pages...");
      crawler.gracefulFinishOnInterrupt();
    } else if (forceTerm || (Date.now() - lastSigInt) > 200) {
      // Subsequent signal (or forced termination): serialize state and exit now.
      logger.info("SIGNAL: stopping crawl now...");
      await crawler.serializeAndExit();
    }
    lastSigInt = Date.now();
  } catch (e) {
    logger.error("Error stopping crawl after receiving termination signal", e);
  }
}

process.on("SIGINT", () => handleTerminate("SIGINT"));

process.on("SIGTERM", () => handleTerminate("SIGTERM"));

process.on("SIGABRT", async () => {
  logger.info("SIGABRT received, will force immediate exit on SIGTERM/SIGINT");
  forceTerm = true;
});

crawler = new Crawler();
crawler.run();
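The handler above implements a two-phase shutdown: the first SIGINT or SIGTERM asks the crawler to gracefully finish the pages it is currently working on, and a later signal, arriving once crawler.interrupted is set and more than 200ms after the previous one (or after a SIGABRT has set forceTerm), stops the crawl and serializes state immediately. A hypothetical driver snippet using Node's built-in process.kill, where pid is a placeholder for the crawler's process id:

// Hypothetical illustration only: drive the two-phase shutdown from another
// Node process. `pid` is a placeholder for the crawler's process id.
const pid = Number(process.argv[2]);

// Phase 1: request a graceful finish of the pages currently being crawled.
process.kill(pid, "SIGINT");

// Phase 2: if the crawl is still running a bit later, force it to stop and
// serialize its state (the handler sees crawler.interrupted === true).
setTimeout(() => process.kill(pid, "SIGINT"), 5000);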