Mirror of https://github.com/webrecorder/browsertrix-crawler.git, synced 2025-10-19 14:33:17 +00:00

* switch base image to chrome/chromium 105 with node 18.x
* convert all source to esm for node 18.x, remove unneeded node-fetch dependency
* ci: use node 18.x, update to latest actions
* tests: convert to esm, run with --experimental-vm-modules
* tests: set higher default timeout (90s) for all tests
* tests: rename driver test fixture to .mjs for loading in jest
* bump to 0.8.0
35 lines
971 B
JavaScript
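The commit notes above describe converting the sources and tests to ESM for Node 18.x, which is why the test file below starts with import statements and why the driver fixture was renamed to .mjs. As a generic illustration of the ESM style (not code taken from this repository; the helper name is made up), a converted module would look like:

// ESM style used after the conversion; the equivalent CommonJS form
// would have been: const fs = require("fs"); module.exports = { readPages };
import fs from "fs";

// hypothetical helper for illustration only
export function readPages(path) {
  return fs.readFileSync(path, "utf8").trim().split("\n");
}

When run under Jest, ESM test files generally need Node's --experimental-vm-modules flag (for example via NODE_OPTIONS), which matches the commit note about how the tests are run.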
import child_process from "child_process";
import fs from "fs";

test("ensure custom driver with custom selector crawls JS files as pages", async () => {
  try {
    // run the crawler in docker with the custom driver fixture mounted in,
    // crawling https://www.iana.org/ into the "custom-driver-1" collection
    child_process.execSync("docker run -v $PWD/tests/fixtures:/tests/fixtures -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --url https://www.iana.org/ --collection custom-driver-1 --driver /tests/fixtures/driver-1.mjs");
  }
  catch (error) {
    console.log(error);
  }

  // collect the URLs recorded in the crawl's pages.jsonl
  const crawledPages = fs.readFileSync("test-crawls/collections/custom-driver-1/pages/pages.jsonl", "utf8");

  const pages = new Set();

  for (const line of crawledPages.trim().split("\n")) {
    const url = JSON.parse(line).url;
    if (!url) {
      continue;
    }
    pages.add(url);
  }

  console.log(pages);

  // the custom driver should have queued the page's JS files as additional pages
  const expectedPages = new Set([
    "https://www.iana.org/",
    "https://www.iana.org/_js/jquery.js",
    "https://www.iana.org/_js/iana.js"
  ]);

  expect(pages).toEqual(expectedPages);
});
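The test above passes --driver /tests/fixtures/driver-1.mjs, but the fixture itself is not shown on this page. As a rough sketch only, assuming the crawler invokes the driver with a {data, page, crawler} object and that crawler.loadPage accepts extra selector rules (both assumptions, not confirmed here), a custom driver that also queues script URLs as pages could look like this:

// Hypothetical sketch of a custom ESM driver fixture; the real driver-1.mjs
// and the exact crawler driver API are assumptions, not shown on this page.
export default async ({ data, page, crawler }) => {
  // Load the page as usual, but also extract <script src="..."> URLs
  // so that the referenced JS files are queued and crawled as pages.
  await crawler.loadPage(page, data, [
    { selector: "script[src]", extract: "src", isAttribute: false }
  ]);
};

A driver along these lines would explain why pages.jsonl ends up containing jquery.js and iana.js alongside the seed URL.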