Mirror of https://github.com/webrecorder/browsertrix-crawler.git, synced 2025-10-19 14:33:17 +00:00

* switch base image to chrome/chromium 105 with node 18.x * convert all source to esm for node 18.x, remove unneeded node-fetch dependency * ci: use node 18.x, update to latest actions * tests: convert to esm, run with --experimental-vm-modules * tests: set higher default timeout (90s) for all tests * tests: rename driver test fixture to .mjs for loading in jest * bump to 0.8.0
62 lines
1.3 KiB
JavaScript
Executable file
#!/usr/bin/env node
|
|
|
|
import { Crawler } from "./crawler.js";
|
|
|
|
// Module-level state shared by the signal handlers below.
let crawler = null; // active Crawler instance; assigned at startup
let lastSigInt = 0; // Date.now() of the last handled termination signal (see handleTerminate)
let forceTerm = false; // armed by SIGABRT: next SIGTERM/SIGINT exits immediately
/**
 * Shared SIGINT/SIGTERM handler.
 *
 * First signal: ask the crawler to finish in-flight pages gracefully.
 * Once draining (crawlState.drainMax is set), or when forceTerm is armed,
 * a further signal serializes state and exits immediately.
 *
 * @param {string} signame - name of the received signal, used for logging
 */
async function handleTerminate(signame) {
  console.log(`${signame} received...`);

  // Guard: nothing is running, so there is nothing to wind down.
  if (!crawler || !crawler.crawlState) {
    console.log("error: no crawler running, exiting");
    process.exit(1);
  }

  // Guard: crawl already finished; treat the signal as a clean shutdown.
  if (crawler.done) {
    console.log("success: crawler done, exiting");
    process.exit(0);
  }

  try {
    const state = crawler.crawlState;
    if (!state.drainMax) {
      // Not yet draining: stop taking new pages, let current ones complete.
      console.log("SIGNAL: gracefully finishing current pages...");
      crawler.gracefulFinish();
    } else {
      // Already draining; a repeat signal (outside a 200ms debounce window)
      // or an armed forceTerm means: persist state and stop right now.
      const sinceLast = Date.now() - lastSigInt;
      if (forceTerm || sinceLast > 200) {
        console.log("SIGNAL: stopping crawl now...");
        await crawler.serializeAndExit();
      }
    }
    lastSigInt = Date.now();
  } catch (e) {
    console.log(e);
  }
}
// Route both standard termination signals through the shared handler.
for (const signame of ["SIGINT", "SIGTERM"]) {
  process.on(signame, () => handleTerminate(signame));
}

// SIGABRT does not terminate anything by itself here; it arms forceTerm so
// the next SIGTERM/SIGINT skips the graceful-finish path and exits at once.
const onAbort = async () => {
  console.log("SIGABRT received, will force immediate exit on SIGTERM/SIGINT");
  forceTerm = true;
};
process.on("SIGABRT", onAbort);
// SIGUSR1 calls crawler.prepareForExit(true), SIGUSR2 calls it with false;
// both are ignored while no crawler instance exists yet.
// (The flag's exact meaning is defined by Crawler.prepareForExit.)
for (const [signame, flag] of [
  ["SIGUSR1", true],
  ["SIGUSR2", false],
]) {
  process.on(signame, () => {
    if (crawler) {
      crawler.prepareForExit(flag);
    }
  });
}
// Entry point: construct the crawler and start the crawl.
crawler = new Crawler();
// NOTE(review): the return value of run() is neither awaited nor caught here;
// presumably errors are handled inside run() -- TODO confirm.
crawler.run();