import util from "util";
import { spawn, exec as execCallback } from "child_process";
import fs from "fs";
import os from "os";
import path from "path";

const exec = util.promisify(execCallback);

let proc = null;
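
// The crawler runs inside Docker and reaches the fixture HTTP server on the
// host via host.docker.internal by default; DOCKER_HOST_NAME can override
// this on hosts where that alias is not available.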
const DOCKER_HOST_NAME = process.env.DOCKER_HOST_NAME || "host.docker.internal";
const TEST_HOST = `http://${DOCKER_HOST_NAME}:31502`;

const fixtures = path.join("tests", "fixtures");
const seedFileCopy = path.join(fixtures, "seedFileCopy.txt");
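
// Copy the seed-list fixture and serve the fixtures directory over HTTP on
// port 31502 so the tests can exercise fetching --urlFile from a URL.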
beforeAll(() => {
  fs.copyFileSync(path.join(fixtures, "urlSeedFile.txt"), seedFileCopy);

  proc = spawn("../../node_modules/.bin/http-server", ["-p", "31502"], {
    cwd: fixtures,
  });
});

afterAll(() => {
  if (proc) {
    proc.kill();
    proc = null;
  }
  fs.unlinkSync(seedFileCopy);
});
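
// Assert that every URL in the seed list appears in the collection's
// pages.jsonl. When hasDownload is true, the copy of the seed list saved
// under the collection's downloads/ directory is used as the source of truth
// instead of the original fixture file.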
function verifyAllSeedsCrawled(collName, hasDownload) {
  const crawled_pages = fs.readFileSync(
    `test-crawls/collections/${collName}/pages/pages.jsonl`,
    "utf8",
  );

  const seedFile = hasDownload
    ? `test-crawls/collections/${collName}/downloads/seeds-seedFileCopy.txt`
    : "tests/fixtures/urlSeedFile.txt";

  // split the seed list into individual URLs, dropping empty lines
  const seed_file_list = fs
    .readFileSync(seedFile, "utf8")
    .split("\n")
    .filter((line) => line !== "")
    .sort();

  // every seed URL must appear somewhere in pages.jsonl
  let foundSeedUrl = true;

  for (let i = 0; i < seed_file_list.length; i++) {
    if (crawled_pages.indexOf(seed_file_list[i]) === -1) {
      foundSeedUrl = false;
    }
  }
  expect(foundSeedUrl).toBe(true);
}

test("check that URLs in seed-list are crawled", async () => {
  try {
    await exec(
      "docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection filelisttest --urlFile /tests/fixtures/urlSeedFile.txt --timeout 90000 --scopeType page",
    );
  } catch (error) {
    console.log(error);
  }

  verifyAllSeedsCrawled("filelisttest", false);
});
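
// Same check, but the seed list is fetched over HTTP from the fixture server
// rather than mounted into the container.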
test("check that URLs in seed-list hosted at URL are crawled", async () => {
  try {
    await exec(
      `docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --collection onlinefilelisttest --urlFile "${TEST_HOST}/seedFileCopy.txt" --timeout 90000 --scopeType page`,
    );
  } catch (error) {
    console.log(error);
  }

  verifyAllSeedsCrawled("onlinefilelisttest", true);
});
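
// Interrupt/restart scenario: crawl with --limit 1, make the seed-list URL
// return 404 by renaming the fixture, then restart into the same collection.
// All seed URLs should still end up crawled, verified against the copy of the
// seed list saved under the collection's downloads/ directory.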
test("start crawl, interrupt, remove seed file, and ensure all seed URLs are crawled", async () => {
  try {
    await exec(
      `docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --collection seed-file-removed --urlFile "${TEST_HOST}/seedFileCopy.txt" --timeout 90000 --scopeType page --limit 1`,
    );
  } catch (error) {
    console.log(error);
  }

  const crawled_pages = fs.readFileSync(
    "test-crawls/collections/seed-file-removed/pages/pages.jsonl",
    "utf8",
  );

  // header line plus a single page, since the crawl ran with --limit 1
  expect(crawled_pages.trim().split("\n").length).toBe(2);

  try {
    // move file so server returns 404
    fs.renameSync(seedFileCopy, seedFileCopy + ".bak");

    // the seed-list URL should now return 404
    try {
      const res = await fetch("http://localhost:31502/seedFileCopy.txt");
      expect(res.status).toBe(404);
    } catch (e) {
      // ignore
    }

    // restart crawl, but with invalid seed list now
    await exec(
      `docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection seed-file-removed --urlFile "${TEST_HOST}/seedFileCopy.txt" --timeout 90000 --scopeType page`,
    );
  } catch (error) {
    console.log(error);
  } finally {
    // move back
    fs.renameSync(seedFileCopy + ".bak", seedFileCopy);
  }

  verifyAllSeedsCrawled("seed-file-removed", true);
});
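
// Same interrupt/restart scenario, but the fixture server itself is stopped,
// so fetching the seed list fails outright instead of returning 404.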
test("start crawl, interrupt, stop seed file server, and ensure all seed URLs are crawled", async () => {
  try {
    await exec(
      `docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --collection seed-file-server-gone --urlFile "${TEST_HOST}/seedFileCopy.txt" --timeout 90000 --scopeType page --limit 1`,
    );
  } catch (error) {
    console.log(error);
  }

  const crawled_pages = fs.readFileSync(
    "test-crawls/collections/seed-file-server-gone/pages/pages.jsonl",
    "utf8",
  );

  // header line plus a single page, since the crawl ran with --limit 1
  expect(crawled_pages.trim().split("\n").length).toBe(2);

  // kill server that serves the seed list
  proc.kill();

  // server no longer up
  await expect(() => fetch("http://localhost:31502/")).rejects.toThrow("fetch failed");

  // restart crawl, but with invalid seed list now
  try {
    await exec(
      `docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection seed-file-server-gone --urlFile "${TEST_HOST}/seedFileCopy.txt" --timeout 90000 --scopeType page`,
    );
  } catch (error) {
    console.log(error);
  }

  verifyAllSeedsCrawled("seed-file-server-gone", true);
});