const yaml = require("js-yaml");
const util = require("util");
const exec = util.promisify(require("child_process").exec);
const fs = require("fs");
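
// Both tests run the crawler via docker-compose and read the crawl output from
// ./crawls on the host: the first checks that every seed from the YAML config
// is crawled, the second that command-line options are applied on top of the config.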
test("check yaml config file with seed list is used", async () => {
  jest.setTimeout(30000);

  try {
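    // run a crawl in Docker using the seed-list config fixture at depth 0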
    await exec("docker-compose run -v $PWD/tests/fixtures:/tests/fixtures crawler crawl --collection configtest --config /tests/fixtures/crawl-1.yaml --depth 0");
  }
  catch (error) {
    console.log(error);
  }
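
  // pages.jsonl has one JSON record per crawled page; collect the page URLs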
  const crawledPages = fs.readFileSync("crawls/collections/configtest/pages/pages.jsonl", "utf8");

  const pages = new Set();

  for (const line of crawledPages.trim().split("\n")) {
    const url = JSON.parse(line).url;

    if (url) {
      pages.add(url);
    }
  }
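
  // crawl-1.yaml (fixture contents not shown here) is assumed to define a
  // top-level `seeds` list of URL strings, e.g.:
  //
  //   seeds:
  //     - https://example.com/   # placeholder, not the actual fixture value
  //
  // check that every seed from the config appears among the crawled pages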
  const config = yaml.load(fs.readFileSync("tests/fixtures/crawl-1.yaml", "utf8"));

  let foundAllSeeds = true;

  for (const seed of config.seeds) {
    const url = new URL(seed).href;

    if (!pages.has(url)) {
      foundAllSeeds = false;
    }
  }

  expect(foundAllSeeds).toBe(true);
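
  // the crawl should also have produced a WACZ archive for this collection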
  expect(fs.existsSync("crawls/collections/configtest/configtest.wacz")).toBe(true);
});


test("check yaml config file will be overwritten by command line", async () => {
  jest.setTimeout(30000);

  try {
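    // same config fixture, but add a seed via --url and a timeout on the command line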
    await exec("docker-compose run -v $PWD/crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures crawler crawl --collection configtest --config /tests/fixtures/crawl-1.yaml --url https://www.example.com --timeout 20000");
  }
  catch (error) {
    console.log(error);
  }

  const crawledPages = fs.readFileSync("crawls/collections/configtest/pages/pages.jsonl", "utf8");

  const pages = new Set();

  for (const line of crawledPages.trim().split("\n")) {
    pages.add(JSON.parse(line).url);
  }
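
  // the URL added via --url should be present among the crawled pages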
  expect(pages.has("https://www.example.com/")).toBe(true);
});