Mirror of https://github.com/webrecorder/browsertrix-crawler.git, synced 2025-10-19 14:33:17 +00:00
Implement improved json-l logging
- Add Logger class with methods for info, error, warn, debug, fatal
- Add context, timestamp, and details fields to log entries
- Log messages as JSON Lines
- Replace puppeteer-cluster stats with custom stats implementation
- Log behaviors by default
- Amend argParser to reflect logging changes
- Capture and log stdout/stderr from awaited child_processes
- Modify tests to use webrecorder.net to avoid timeouts
This commit is contained in:
parent 2b03e23174
commit 0192d05f4c
16 changed files with 276 additions and 194 deletions
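
The commit message describes a Logger that emits one JSON object per line, with logLevel, timestamp, context, message, and details fields. The following is a minimal sketch of what such a class might look like, written to match the methods and fields named in the commit message; the exact signatures, the default context value, and the fatal exit behavior are assumptions rather than the commit's actual code.

// Minimal sketch only: field names follow the commit message; everything
// else (method signatures, default context, fatal exit code) is an assumption.
class Logger {
  logAsJSON(message, details, context, logLevel) {
    const entry = {
      logLevel,                             // "info", "warn", "debug", "error", "fatal"
      timestamp: new Date().toISOString(),  // when the entry was written
      context,                              // e.g. "general", "behavior"
      message,
      details: details || {},
    };
    // JSON Lines: one JSON object per line on stdout
    console.log(JSON.stringify(entry));
  }

  info(message, details = {}, context = "general") {
    this.logAsJSON(message, details, context, "info");
  }

  warn(message, details = {}, context = "general") {
    this.logAsJSON(message, details, context, "warn");
  }

  debug(message, details = {}, context = "general") {
    this.logAsJSON(message, details, context, "debug");
  }

  error(message, details = {}, context = "general") {
    this.logAsJSON(message, details, context, "error");
  }

  fatal(message, details = {}, context = "general") {
    this.logAsJSON(message, details, context, "fatal");
    process.exit(1); // assumption: a fatal log ends the process with a non-zero code
  }
}

With a class like this, a call such as logger.info("Crawl started", { workers: 4 }, "crawlStatus") prints a single JSON line, which is what makes the output easy to consume as JSON Lines (json-l). The "crawlStatus" context string here is purely illustrative.
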
@@ -4,10 +4,10 @@ import fs from "fs";
 
 const exec = util.promisify(execCallback);
 
-test("check that URLs one-depth out from the seed-list are crawled", async () => {
+test("check that URLs in seed-list are crawled", async () => {
   try {
 
-    await exec("docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection filelisttest --urlFile /tests/fixtures/urlSeedFile.txt --timeout 10000");
+    await exec("docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection filelisttest --urlFile /tests/fixtures/urlSeedFile.txt --timeout 90000");
   }
   catch (error) {
     console.log(error);
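
The awaited exec() in the test above is the kind of awaited child process the commit message refers to in "Capture and log stdout/stderr from awaited child_processes". Below is a hedged sketch of one way that capture could feed into a JSON-Lines logger; the runWithLogging helper name and the "childProcess" context string are illustrative assumptions, not names taken from the commit.

// Illustrative sketch only: runWithLogging and the "childProcess" context
// are assumptions, not names from the commit.
import { exec as execCallback } from "child_process";
import util from "util";

const exec = util.promisify(execCallback);

async function runWithLogging(command, logger) {
  try {
    // util.promisify(exec) resolves with the captured output buffers
    const { stdout, stderr } = await exec(command);
    if (stdout) {
      logger.debug(stdout.trim(), {}, "childProcess");
    }
    if (stderr) {
      logger.debug(stderr.trim(), {}, "childProcess");
    }
  } catch (error) {
    // on a non-zero exit, the error object also carries the captured output
    logger.error("Child process failed", { code: error.code, stderr: error.stderr }, "childProcess");
    throw error;
  }
}

Because util.promisify(exec) resolves with { stdout, stderr } and rejects with an error that still carries the captured output, both the success and failure paths can be logged without running the process any differently.
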