mirror of
https://github.com/webrecorder/browsertrix-crawler.git
synced 2025-10-19 06:23:16 +00:00
clear out core dumps to avoid using up volume space: (#740)
- add 'ulimit -c' to startup script - delete any './core' files that exist in working dir just in case - fixes #738
This commit is contained in:
parent
b7150f1343
commit
bc4a95883d
2 changed files with 11 additions and 0 deletions
|
@@ -1,5 +1,8 @@
 #!/bin/sh
 
+# disable core dumps
+ulimit -c 0
+
 # Get UID/GID from volume dir
 VOLUME_UID=$(stat -c '%u' /crawls)
|
|
@@ -4,6 +4,7 @@ import { logger } from "./util/logger.js";
 import { setExitOnRedisError } from "./util/redis.js";
 import { Crawler } from "./crawler.js";
 import { ReplayCrawler } from "./replaycrawler.js";
+import fs from "node:fs";
 
 let crawler: Crawler | null = null;
|
@@ -56,4 +57,11 @@ if (process.argv[1].endsWith("qa")) {
   crawler = new Crawler();
 }
 
+// remove any core dumps which could be taking up space in the working dir
+try {
+  fs.unlinkSync("./core");
+} catch (e) {
+  //ignore
+}
+
 await crawler.run();
|
Loading…
Add table
Add a link
Reference in a new issue