tests: disable retryStrategy for redis, test for better termination behavior

Ilya Kreymer 2024-03-22 00:10:26 -07:00
parent f6a7dab3ba
commit 10e92a4f7b
4 changed files with 13 additions and 35 deletions
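The key change: each test now constructs its ioredis client with retryStrategy: () => null. In ioredis, returning a non-number from retryStrategy stops the client from reconnecting, so once the crawler container exits, the test's Redis connection fails fast instead of retrying forever and keeping the test process alive. A minimal sketch of the pattern (placeholder URL):

import Redis from "ioredis";

// lazyConnect defers connecting until connect() is called explicitly;
// returning null from retryStrategy tells ioredis never to reconnect,
// so a dropped connection surfaces as an error rather than an endless retry loop
const redis = new Redis("redis://127.0.0.1:6379/0", {
  lazyConnect: true,
  retryStrategy: () => null,
});

await redis.connect();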

View file

@@ -1,6 +1,10 @@
 import { exec } from "child_process";
 import Redis from "ioredis";
+
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
 
 test("dynamically add exclusion while crawl is running", async () => {
   let callback = null;
@@ -20,18 +24,18 @@ test("dynamically add exclusion while crawl is running", async () => {
     console.log(error);
   }
 
-  await new Promise((resolve) => setTimeout(resolve, 3000));
+  await sleep(3000);
 
-  const redis = new Redis("redis://127.0.0.1:36382/0", { lazyConnect: true });
+  const redis = new Redis("redis://127.0.0.1:36382/0", { lazyConnect: true, retryStrategy: () => null });
 
-  await redis.connect({ maxRetriesPerRequest: 50 });
+  await redis.connect();
 
   while (true) {
     if (Number(await redis.zcard("test:q")) > 1) {
       break;
     }
-    await new Promise((resolve) => setTimeout(resolve, 500));
+    await sleep(500);
   }
 
   const uids = await redis.hkeys("test:status");
@@ -48,6 +52,5 @@ test("dynamically add exclusion while crawl is running", async () => {
   expect(stdout.indexOf("Add Exclusion") > 0).toBe(true);
   expect(stdout.indexOf("Removing excluded URL") > 0).toBe(true);
 
-  await redis.disconnect();
 });
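The connect call also loses its options object. As far as ioredis's API goes, connect() takes at most a callback; client options such as maxRetriesPerRequest belong in the Redis constructor, so the old connect({ maxRetriesPerRequest: 50 }) argument appears to have been ignored anyway:

// ioredis: connect() accepts only an optional callback; per-client options
// must be passed to the constructor instead
await redis.connect();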

View file

@@ -29,7 +29,7 @@ test("run QA comparison, with write pages to redis", async () => {
     crawler_exited = true;
   });
 
-  const redis = new Redis("redis://127.0.0.1:36380/0", { lazyConnect: true });
+  const redis = new Redis("redis://127.0.0.1:36380/0", { lazyConnect: true, retryStrategy: () => null });
 
   await sleep(3000);
@@ -68,12 +68,6 @@ test("run QA comparison, with write pages to redis", async () => {
   expect(count).toBe(3);
 
-  try {
-    await redis.disconnect();
-  } catch (e) {
-    console.log(e);
-  }
-
   // wait for crawler exit
   while (!crawler_exited) {
     await sleep(100);
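Removing the early redis.disconnect() pairs with the existing wait-for-exit loop: with reconnection disabled, the test simply waits for the crawler process itself to finish before asserting. A condensed sketch of that termination pattern, assuming a crawler started with exec (the command here is a placeholder):

import { exec } from "child_process";

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

let crawler_exited = false;

// placeholder command; the real test runs the browsertrix-crawler container
const crawler = exec("docker run webrecorder/browsertrix-crawler crawl ...");
crawler.on("exit", () => {
  crawler_exited = true;
});

// poll until the crawler process has exited before checking its output
while (!crawler_exited) {
  await sleep(100);
}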

View file

@@ -128,14 +128,11 @@ test("check crawl restarted with saved state", async () => {
   await sleep(2000);
 
-  const redis = new Redis("redis://127.0.0.1:36379/0", { lazyConnect: true });
+  const redis = new Redis("redis://127.0.0.1:36379/0", { lazyConnect: true, retryStrategy: () => null });
 
   try {
     await redis.connect({
       maxRetriesPerRequest: 100,
-      retryStrategy(times) {
-        return times < 100 ? 1000 : null;
-      },
     });
 
     await sleep(2000);
@@ -150,11 +147,5 @@ test("check crawl restarted with saved state", async () => {
     console.log(e);
   } finally {
     await waitContainer(containerId);
-
-    try {
-      await redis.disconnect();
-    } catch (e) {
-      // ignore
-    }
   }
 });
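waitContainer itself is outside this diff; a plausible implementation (an assumption, not the repository's exact code) signals the container and then blocks on the Docker CLI until it stops:

import child_process from "child_process";

// hypothetical sketch of waitContainer
async function waitContainer(containerId) {
  try {
    // ask the container to shut down
    child_process.execSync(`docker kill -s SIGINT ${containerId}`);
  } catch (e) {
    // container may already have exited
  }
  // "docker wait" blocks until the container actually stops
  child_process.execSync(`docker wait ${containerId}`, { encoding: "utf-8" });
}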

View file

@@ -1,7 +1,6 @@
 import child_process from "child_process";
 import Redis from "ioredis";
 
 function sleep(ms) {
   return new Promise((resolve) => setTimeout(resolve, ms));
 }
@@ -33,18 +32,15 @@ async function waitContainer(containerId) {
 async function runCrawl(numExpected, url, sitemap="", limit=0) {
   const containerId = child_process.execSync(`docker run -d -p 36381:6379 -e CRAWL_ID=test webrecorder/browsertrix-crawler crawl --url ${url} --sitemap ${sitemap} --limit ${limit} --context sitemap --logging debug --debugAccessRedis`, {encoding: "utf-8"});
 
-  await sleep(2000);
+  await sleep(3000);
 
-  const redis = new Redis("redis://127.0.0.1:36381/0", { lazyConnect: true });
+  const redis = new Redis("redis://127.0.0.1:36381/0", { lazyConnect: true, retryStrategy: () => null });
 
   let finished = 0;
 
   try {
     await redis.connect({
       maxRetriesPerRequest: 100,
-      retryStrategy(times) {
-        return times < 100 ? 1000 : null;
-      },
     });
 
     while (true) {
@@ -58,11 +54,6 @@ async function runCrawl(numExpected, url, sitemap="", limit=0) {
     console.error(e);
   } finally {
     await waitContainer(containerId);
-
-    try {
-      await redis.disconnect();
-    } catch (e) {
-      // ignore
-    }
   }
 
   expect(finished).toBeGreaterThanOrEqual(numExpected);
@@ -79,4 +70,3 @@ test("test sitemap with limit", async () => {
 
 test("test sitemap with limit, specific URL", async () => {
   await runCrawl(1900, "https://www.mozilla.org/", "https://www.mozilla.org/sitemap.xml", 2000);
 });
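Note that the saved-state and sitemap tests still pass maxRetriesPerRequest: 100 when connecting, while dropping the custom retryStrategy. In ioredis these are separate knobs: retryStrategy decides whether (and when) the client reconnects at all, whereas maxRetriesPerRequest caps how many times a single command is re-queued across reconnect attempts before it fails; like retryStrategy, it takes effect as a constructor option. A sketch of the distinction (placeholder URL):

import Redis from "ioredis";

const redis = new Redis("redis://127.0.0.1:6379/0", {
  lazyConnect: true,
  retryStrategy: () => null,  // connection level: never reconnect
  maxRetriesPerRequest: 100,  // command level: cap retries for each pending command
});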