Mirror of https://github.com/webrecorder/browsertrix-crawler.git (synced 2025-10-19 14:33:17 +00:00)

* Resolves #12
* Make --url param optional. Only one of --url or --urlFile should be specified.
* Add ignoreScope option to queueUrls() to support adding specific URLs
* Add tests for urlFile
* Bump version to 0.3.2

Co-authored-by: Emma Dickson <emmadickson@Emmas-MacBook-Pro.local>
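The commit message above describes an ignoreScope option on queueUrls() that lets seeds read from --urlFile be queued directly, outside the normal scope checks. The sketch below is a minimal illustration of that flow, assuming a one-URL-per-line seed file; the loadSeedsFromFile helper and the crawler object are hypothetical, and the exact shape of the queueUrls() call is an assumption rather than the project's actual API.

const fs = require("fs");

// Read a --urlFile style seed list: one URL per line, blank lines ignored.
function loadSeedsFromFile(path) {
  return fs.readFileSync(path, "utf8")
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line.length > 0);
}

// Hypothetical call site: queue each seed with scope checks disabled so the
// exact URLs from the file are always crawled.
// crawler.queueUrls(loadSeedsFromFile("tests/fixtures/urlSeedFile.txt"), { ignoreScope: true });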
34 lines
1 KiB
JavaScript
const util = require("util");
|
|
const exec = util.promisify(require("child_process").exec);
|
|
const fs = require("fs");
|
|
|
|
test("check that all urls in a file list are crawled when the filelisturl param is passed", async () => {
|
|
jest.setTimeout(30000);
|
|
|
|
try{
|
|
|
|
await exec("docker-compose run -v $PWD/tests/fixtures:/app/tests/fixtures crawler crawl --collection filelisttest --urlFile tests/fixtures/urlSeedFile.txt --timeout 10000");
|
|
}
|
|
catch (error) {
|
|
console.log(error);
|
|
}
|
|
|
|
let crawled_pages = fs.readFileSync("crawls/collections/filelisttest/pages/pages.jsonl", "utf8");
|
|
let seed_file = fs.readFileSync("tests/fixtures/urlSeedFile.txt", "utf8").split("\n").sort();
|
|
|
|
let seed_file_list = [];
|
|
for (var j = 0; j < seed_file.length; j++) {
|
|
if (seed_file[j] != undefined){
|
|
seed_file_list.push(seed_file[j]);
|
|
}
|
|
}
|
|
|
|
let foundSeedUrl = true;
|
|
|
|
for (var i = 1; i < seed_file_list.length; i++) {
|
|
if (crawled_pages.indexOf(seed_file_list[i]) == -1){
|
|
foundSeedUrl = false;
|
|
}
|
|
}
|
|
expect(foundSeedUrl).toBe(true);
|
|
});
|
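A stricter variant of the membership check would parse pages.jsonl line by line and compare full URLs instead of substring-matching against the raw file contents. The sketch below assumes each non-empty line of pages.jsonl is a JSON object with a url property, which this test does not itself verify.

// Sketch: collect crawled URLs by parsing each JSONL record, skipping lines
// that are not valid JSON or that carry no url field (e.g. a header record).
function parseCrawledUrls(jsonlText) {
  const urls = new Set();
  for (const line of jsonlText.split("\n")) {
    if (!line.trim()) continue;
    try {
      const record = JSON.parse(line);
      if (record.url) urls.add(record.url);
    } catch (e) {
      // Ignore lines that are not JSON records.
    }
  }
  return urls;
}

// Hypothetical usage inside the test:
// expect(parseCrawledUrls(crawledPages).has(seedFileList[0])).toBe(true);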