import util from "util";
import { exec as execCallback } from "child_process";
import fs from "fs";

const exec = util.promisify(execCallback);

test("check that URLs in seed-list are crawled", async () => {
  // Run the crawler in a Docker container against the seed-list fixture.
  // Any error is logged; the assertions below decide whether the test passes.
  try {
    await exec(
      "docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection filelisttest --urlFile /tests/fixtures/urlSeedFile.txt --timeout 90000",
    );
  } catch (error) {
    console.log(error);
  }

  // Pages listing produced by the crawl (JSON Lines).
  let crawled_pages = fs.readFileSync(
    "test-crawls/collections/filelisttest/pages/pages.jsonl",
    "utf8",
  );
  let seed_file = fs
    .readFileSync("tests/fixtures/urlSeedFile.txt", "utf8")
    .split("\n")
    .sort();
  // Build the list of seed URLs, skipping any blank lines in the seed file.
  let seed_file_list = [];
  for (let j = 0; j < seed_file.length; j++) {
    if (seed_file[j]) {
      seed_file_list.push(seed_file[j]);
    }
  }

  // Every seed URL must appear in the crawled pages listing.
  let foundSeedUrl = true;
  for (let i = 0; i < seed_file_list.length; i++) {
    if (crawled_pages.indexOf(seed_file_list[i]) === -1) {
      foundSeedUrl = false;
    }
  }
  expect(foundSeedUrl).toBe(true);
});