import child_process from "child_process";
import fs from "fs";
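
// Integration tests for the browsertrix-crawler Docker image: custom link
// selectors (--selectLinks) and autoclick selector (--clickSelector) validation.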

test("test custom selector crawls JS files as pages", async () => {
  try {
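    // "script[src]->src" tells the crawler to extract the src attribute of
    // <script> elements as additional links to crawl.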
    child_process.execSync(
      "docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --url https://www.iana.org/ --collection custom-sel-1 --selectLinks \"script[src]->src\"",
    );
  } catch (error) {
    console.log(error);
  }

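  // pages.jsonl records the crawled pages, one JSON object per line.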
  const crawledPages = fs.readFileSync(
    "test-crawls/collections/custom-sel-1/pages/pages.jsonl",
    "utf8",
  );
  const pages = new Set();

  for (const line of crawledPages.trim().split("\n")) {
    const url = JSON.parse(line).url;
    if (!url) {
      continue;
    }
    pages.add(url);
  }

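  // extraPages.jsonl records additional pages captured beyond the seed.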
  const crawledExtraPages = fs.readFileSync(
    "test-crawls/collections/custom-sel-1/pages/extraPages.jsonl",
    "utf8",
  );
  const extraPages = new Set();

  for (const line of crawledExtraPages.trim().split("\n")) {
    const url = JSON.parse(line).url;
    if (!url) {
      continue;
    }
    extraPages.add(url);
  }

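  // The seed should be the only regular page; the two script URLs picked up
  // by the custom selector should be recorded as extra pages.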
  const expectedPages = new Set([
    "https://www.iana.org/",
  ]);

  const expectedExtraPages = new Set([
    "https://www.iana.org/_js/jquery.js",
    "https://www.iana.org/_js/iana.js",
  ]);

  expect(pages).toEqual(expectedPages);
  expect(extraPages).toEqual(expectedExtraPages);
});

test("test invalid selector, crawl fails", async () => {
  let status = 0;
  try {
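    // "script[" is not a parseable CSS selector, so the crawl should fail
    // instead of running.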
    child_process.execSync(
      "docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --url https://www.iana.org/ --collection custom-sel-invalid --selectLinks \"script[\"",
    );
  } catch (e) {
    status = e.status;
  }

  // logger fatal exit code
  expect(status).toBe(17);
});

test("test valid autoclick selector passes validation", async () => {
  let failed = false;

  try {
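    // "button" is a valid selector for --clickSelector, so the crawl should
    // complete without error.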
    child_process.execSync(
      "docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --url https://example-com.webrecorder.net/ --clickSelector button --scopeType page",
    );
  } catch (e) {
    failed = true;
  }

  // valid clickSelector
  expect(failed).toBe(false);
});

test("test invalid autoclick selector fails validation, crawl fails", async () => {
  let status = 0;

  try {
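    // "," is not a valid CSS selector, so --clickSelector validation should
    // fail the crawl.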
    child_process.execSync(
      "docker run -v $PWD/test-crawls:/crawls webrecorder/browsertrix-crawler crawl --url https://example-com.webrecorder.net/ --clickSelector \",\" --scopeType page",
    );
  } catch (e) {
    status = e.status;
  }

  // logger fatal exit code
  expect(status).toBe(17);
});