browsertrix-crawler/tests/url_file_list.test.js
Ilya Kreymer a2742df328
seed urls list: check for quoted URLs and remove quotes (#883)
- check for urls that are wrapped in quotes, eg. 'https://example.com/'
or "https://example.com/" and trim and remove the quotes before adding seed
- tests: add quoted URL to tests, fix old.webrecorder.net test
- deps: update wabac.js, RWP to latest
- logging: reduce error logging for seed lists, only log once that there are duplicates or page limit is reached
- fix for #882
2025-09-12 13:34:41 -07:00

93 lines
2.4 KiB
JavaScript

import util from "util";
import { spawn, exec as execCallback } from "child_process";
import fs from "fs";
// Promisified child_process.exec so docker commands can be awaited.
const exec = util.promisify(execCallback);
// Handle to the fixture HTTP server started in beforeAll, killed in afterAll.
let proc = null;
// Hostname through which the crawler container reaches the host machine;
// overridable for environments where host.docker.internal is unavailable.
const DOCKER_HOST_NAME = process.env.DOCKER_HOST_NAME || "host.docker.internal";
// Base URL of the fixture server as seen from inside the crawler container.
const TEST_HOST = `http://${DOCKER_HOST_NAME}:31502`;
// Serve tests/fixtures/ over HTTP on port 31502 so the crawler container
// can fetch the seed file from the host (see TEST_HOST).
beforeAll(() => {
  const serverBin = "../../node_modules/.bin/http-server";
  const serverArgs = ["-p", "31502"];
  proc = spawn(serverBin, serverArgs, { cwd: "tests/fixtures/" });
});
// Shut down the fixture HTTP server once all tests have finished.
afterAll(() => {
  proc?.kill();
});
test("check that URLs in seed-list are crawled", async () => {
  // Run the crawler against the local seed file; a failed docker run is
  // logged but not fatal here — the assertions below catch missing pages.
  try {
    await exec(
      "docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection filelisttest --urlFile /tests/fixtures/urlSeedFile.txt --timeout 90000 --scopeType page",
    );
  } catch (error) {
    console.log(error);
  }

  const crawledPages = fs.readFileSync(
    "test-crawls/collections/filelisttest/pages/pages.jsonl",
    "utf8",
  );

  // The crawler trims each seed line and strips matching surrounding
  // quotes before crawling (#883), so normalize the fixture lines the
  // same way. Also drop empty lines (e.g. from the trailing newline) —
  // String.split never yields undefined, so the old `!= undefined`
  // filter was dead code and the old loop had to skip index 0.
  const seedUrls = fs
    .readFileSync("tests/fixtures/urlSeedFile.txt", "utf8")
    .split("\n")
    .map((line) => line.trim().replace(/^(["'])(.*)\1$/, "$2"))
    .filter((line) => line.length > 0);

  // Every seed URL must appear in the crawled pages listing.
  for (const url of seedUrls) {
    expect(crawledPages.includes(url)).toBe(true);
  }
});
test("check that URLs in seed-list hosted at URL are crawled", async () => {
  // Run the crawler against the seed file served over HTTP by the
  // fixture server; a failed docker run is logged but not fatal here —
  // the assertions below catch missing pages.
  try {
    await exec(
      `docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection onlinefilelisttest --urlFile "${TEST_HOST}/urlSeedFile.txt" --timeout 90000 --scopeType page`,
    );
  } catch (error) {
    console.log(error);
  }

  const crawledPages = fs.readFileSync(
    "test-crawls/collections/onlinefilelisttest/pages/pages.jsonl",
    "utf8",
  );

  // The crawler trims each seed line and strips matching surrounding
  // quotes before crawling (#883), so normalize the fixture lines the
  // same way. Also drop empty lines (e.g. from the trailing newline) —
  // String.split never yields undefined, so the old `!= undefined`
  // filter was dead code and the old loop had to skip index 0.
  const seedUrls = fs
    .readFileSync("tests/fixtures/urlSeedFile.txt", "utf8")
    .split("\n")
    .map((line) => line.trim().replace(/^(["'])(.*)\1$/, "$2"))
    .filter((line) => line.length > 0);

  // Every seed URL must appear in the crawled pages listing.
  for (const url of seedUrls) {
    expect(crawledPages.includes(url)).toBe(true);
  }
});