| 
									
										
										
										
											2022-10-24 15:30:10 +02:00
										 |  |  | import fs from "fs"; | 
					
						
							|  |  |  | import yaml from "js-yaml"; | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | import util from "util"; | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  | import { exec as execCallback } from "child_process"; | 
					
						
							| 
									
										
										
										
											2022-10-24 15:30:10 +02:00
										 |  |  | 
 | 
					
						
// Promise-returning wrapper around child_process.exec, so the docker
// crawl commands below can be awaited instead of using callbacks.
const exec = util.promisify(execCallback);
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-10-24 15:30:10 +02:00
										 |  |  | test("check yaml config file with seed list is used", async () => { | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   try { | 
					
						
							|  |  |  |     await exec( | 
					
						
							|  |  |  |       "docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --config /tests/fixtures/crawl-1.yaml --depth 0", | 
					
						
							|  |  |  |     ); | 
					
						
							|  |  |  |   } catch (error) { | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |     console.log(error); | 
					
						
							|  |  |  |   } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   const crawledPages = fs.readFileSync( | 
					
						
							|  |  |  |     "test-crawls/collections/configtest/pages/pages.jsonl", | 
					
						
							|  |  |  |     "utf8", | 
					
						
							|  |  |  |   ); | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |   const pages = new Set(); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   for (const line of crawledPages.trim().split("\n")) { | 
					
						
							| 
									
										
											  
											
												Per-Seed Scoping Rules + Crawl Depth (#63)
* scoped seeds:
- support per-seed scoping (include + exclude), allowHash, depth, and sitemap options
- support maxDepth per seed #16
- combine --url, --seed and --urlFile/--seedFile urls into a unified seed list
arg parsing:
- simplify seed file options into --seedFile/--urlFile, move option in help display
- rename --maxDepth -> --depth, supported globally and per seed
- ensure custom parsed params from argParser passed back correctly (behaviors, logging, device emulation)
- update to latest js-yaml
- rename --yamlConfig -> --config
- config: support reading config from stdin if --config set to 'stdin'
* scope: fix typo in 'prefix' scope
* update browsertrix-behaviors to 0.2.2
* tests: add test for passing config via stdin, also adding --excludes via cmdline
* update README:
- latest cli, add docs on config via stdin
- rename --yamlConfig -> --config, consolidate --seedFile/--urlFile, move arg position
- info on scoped seeds
- list current scope types
											
										 
											2021-06-26 13:11:29 -07:00
										 |  |  |     const url = JSON.parse(line).url; | 
					
						
							|  |  |  |     if (url) { | 
					
						
							|  |  |  |       pages.add(url); | 
					
						
							|  |  |  |     } | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |   } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   const config = yaml.load( | 
					
						
							|  |  |  |     fs.readFileSync("tests/fixtures/crawl-1.yaml", "utf8"), | 
					
						
							|  |  |  |   ); | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   let foundAllSeeds = true; | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  | 
 | 
					
						
							|  |  |  |   for (const seed of config.seeds) { | 
					
						
							|  |  |  |     const url = new URL(seed).href; | 
					
						
							|  |  |  |     if (!pages.has(url)) { | 
					
						
							|  |  |  |       foundAllSeeds = false; | 
					
						
							|  |  |  |     } | 
					
						
							|  |  |  |   } | 
					
						
							|  |  |  |   expect(foundAllSeeds).toBe(true); | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   expect( | 
					
						
							|  |  |  |     fs.existsSync("test-crawls/collections/configtest/configtest.wacz"), | 
					
						
							|  |  |  |   ).toBe(true); | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  | }); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | test("check yaml config file will be overwritten by command line", async () => { | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   try { | 
					
						
							|  |  |  |     await exec( | 
					
						
							|  |  |  |       "docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection configtest-2 --config /tests/fixtures/crawl-1.yaml --url https://specs.webrecorder.net/ --scopeType page --timeout 20000", | 
					
						
							|  |  |  |     ); | 
					
						
							|  |  |  |   } catch (error) { | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |     console.log(error); | 
					
						
							|  |  |  |   } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-11-09 19:11:11 -05:00
										 |  |  |   const crawledPages = fs.readFileSync( | 
					
						
							|  |  |  |     "test-crawls/collections/configtest-2/pages/pages.jsonl", | 
					
						
							|  |  |  |     "utf8", | 
					
						
							|  |  |  |   ); | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |   const pages = new Set(); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   for (const line of crawledPages.trim().split("\n")) { | 
					
						
							| 
									
										
										
										
											2021-07-23 18:31:43 -07:00
										 |  |  |     const url = JSON.parse(line).url; | 
					
						
							|  |  |  |     if (url) { | 
					
						
							|  |  |  |       pages.add(url); | 
					
						
							|  |  |  |     } | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  |   } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2022-12-15 12:38:41 -05:00
										 |  |  |   expect(pages.has("https://specs.webrecorder.net/")).toBe(true); | 
					
						
							| 
									
										
										
										
											2021-07-23 18:31:43 -07:00
										 |  |  |   expect(pages.size).toBe(1); | 
					
						
							| 
									
										
										
										
											2021-06-23 19:36:32 -07:00
										 |  |  | }); |