Add Prettier to the repo, and format all the files! (#428)

This adds prettier to the repo, and sets up the pre-commit hook to
auto-format as well as lint.
Also updates ignore files to exclude crawls, test-crawls, scratch, and dist as needed.
This commit is contained in:
Emma Segal-Grossman 2023-11-09 19:11:11 -05:00 committed by GitHub
parent af1e0860e4
commit 2a49406df7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
70 changed files with 3192 additions and 2026 deletions

View file

@ -13,7 +13,7 @@ function waitForProcess() {
};
});
return {p, callback};
return { p, callback };
}
var savedStateFile;
@ -28,9 +28,12 @@ test("check crawl interrupted + saved state written", async () => {
const wait = waitForProcess();
try {
proc = exec("docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection int-state-test --url https://webrecorder.net/ --limit 20", {"shell": "/bin/bash"}, wait.callback);
}
catch (error) {
proc = exec(
"docker run -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection int-state-test --url https://webrecorder.net/ --limit 20",
{ shell: "/bin/bash" },
wait.callback,
);
} catch (error) {
console.log(error);
}
@ -45,12 +48,15 @@ test("check crawl interrupted + saved state written", async () => {
while (true) {
try {
const pages = fs.readFileSync(pagesFile, {encoding: "utf-8"}).trim().split("\n");
const pages = fs
.readFileSync(pagesFile, { encoding: "utf-8" })
.trim()
.split("\n");
if (pages.length >= 2) {
break;
}
} catch(e) {
} catch (e) {
// ignore
}
@ -61,18 +67,22 @@ test("check crawl interrupted + saved state written", async () => {
await wait.p;
const savedStates = fs.readdirSync("test-crawls/collections/int-state-test/crawls");
const savedStates = fs.readdirSync(
"test-crawls/collections/int-state-test/crawls",
);
expect(savedStates.length > 0).toEqual(true);
savedStateFile = savedStates[savedStates.length - 1];
});
test("check parsing saved state + page done + queue present", () => {
expect(savedStateFile).toBeTruthy();
const savedState = fs.readFileSync(path.join("test-crawls/collections/int-state-test/crawls", savedStateFile), "utf-8");
const savedState = fs.readFileSync(
path.join("test-crawls/collections/int-state-test/crawls", savedStateFile),
"utf-8",
);
const saved = yaml.load(savedState);
expect(!!saved.state).toBe(true);
@ -82,31 +92,33 @@ test("check parsing saved state + page done + queue present", () => {
expect(state.done > 0).toEqual(true);
expect(state.queued.length > 0).toEqual(true);
});
test("check crawl restarted with saved state", async () => {
let proc = null;
const wait = waitForProcess();
try {
proc = exec(`docker run -p 36379:6379 -e CRAWL_ID=test -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection int-state-test --url https://webrecorder.net/ --config /crawls/collections/int-state-test/crawls/${savedStateFile} --debugAccessRedis --limit 5`, {shell: "/bin/bash"}, wait.callback);
proc = exec(
`docker run -p 36379:6379 -e CRAWL_ID=test -v $PWD/test-crawls:/crawls -v $PWD/tests/fixtures:/tests/fixtures webrecorder/browsertrix-crawler crawl --collection int-state-test --url https://webrecorder.net/ --config /crawls/collections/int-state-test/crawls/${savedStateFile} --debugAccessRedis --limit 5`,
{ shell: "/bin/bash" },
wait.callback,
);
} catch (error) {
console.log(error);
}
await new Promise((resolve) => setTimeout(resolve, 2000));
redis = new Redis("redis://127.0.0.1:36379/0", {lazyConnect: true});
redis = new Redis("redis://127.0.0.1:36379/0", { lazyConnect: true });
try {
await redis.connect({
maxRetriesPerRequest: 100,
retryStrategy(times) {
return times < 100 ? 1000 : null;
}
},
});
await new Promise((resolve) => setTimeout(resolve, 2000));
@ -126,5 +138,3 @@ test("interrupt crawl and exit", async () => {
expect(res[0].value).toBe(0);
});