prevent crawlRec from crashing when dir_path does not exist

This commit is contained in:
Thomas Silvestre 2023-06-21 16:24:58 +02:00
parent 8d5ce21df4
commit 387c71c0aa
2 changed files with 74 additions and 2 deletions

View File

@ -1,8 +1,15 @@
 import * as fs from "fs";
 import * as path from "path";
 
-const crawlRec = (dir_path: string, paths: string[]) => {
-    for (const file_name of fs.readdirSync(dir_path)) {
+export const crawlRec = (dir_path: string, paths: string[]) => {
+    let file_names: string[] = [];
+    try {
+        file_names = fs.readdirSync(dir_path);
+    } catch (e) {
+        // dir_path does not exist
+        return;
+    }
+    for (const file_name of file_names) {
         const file_path = path.join(dir_path, file_name);
         if (fs.lstatSync(file_path).isDirectory()) {

View File

@ -0,0 +1,65 @@
import path from "path";
import { it, describe, expect, vi, beforeAll, afterAll } from "vitest";
import { crawlRec } from "keycloakify/bin/tools/crawl";
describe("crawl", () => {
    // Suite name matches the exported symbol `crawlRec`
    // (previously read "crawRec" — typo fixed).
    describe("crawlRec", () => {
        beforeAll(() => {
            // Fake filesystem layout served by the mocked readdirSync:
            //   root_dir
            //   ├── sub_1_dir
            //   │   ├── file_3
            //   │   ├── sub_3_dir
            //   │   │   └── file_5
            //   │   └── file_4
            //   ├── file_1
            //   ├── sub_2_dir   (empty)
            //   └── file_2
            // Any other path throws ENOENT, mimicking a missing directory.
            // NOTE(review): crawl.ts imports from "fs" while this mocks
            // "node:fs" — confirm vitest resolves both specifiers to the
            // same mock in this setup.
            vi.mock("node:fs", async () => {
                const mod = await vi.importActual<typeof import("fs")>("fs");
                return {
                    ...mod,
                    readdirSync: vi.fn().mockImplementation((dir_path: string) => {
                        switch (dir_path) {
                            case "root_dir":
                                return ["sub_1_dir", "file_1", "sub_2_dir", "file_2"];
                            case path.join("root_dir", "sub_1_dir"):
                                return ["file_3", "sub_3_dir", "file_4"];
                            case path.join("root_dir", "sub_1_dir", "sub_3_dir"):
                                return ["file_5"];
                            case path.join("root_dir", "sub_2_dir"):
                                return [];
                            default: {
                                // Unknown paths behave like a missing directory,
                                // matching Node's scandir failure shape.
                                const enoent = new Error(`ENOENT: no such file or directory, scandir '${dir_path}'`);
                                // @ts-ignore
                                enoent.code = "ENOENT";
                                // @ts-ignore
                                enoent.syscall = "open";
                                // @ts-ignore
                                enoent.path = dir_path;
                                throw enoent;
                            }
                        }
                    }),
                    // Directories are recognised purely by the "_dir" name suffix.
                    lstatSync: vi.fn().mockImplementation((file_path: string) => {
                        return { isDirectory: () => file_path.endsWith("_dir") };
                    })
                };
            });
        });
        afterAll(() => {
            vi.resetAllMocks();
        });
        // NOTE(review): expectations below assume POSIX "/" separators; on
        // Windows, path.join inside crawlRec would produce "\" — confirm CI
        // only runs these on POSIX, or normalise separators.
        it("returns files under a given dir_path", async () => {
            const paths: string[] = [];
            crawlRec("root_dir/sub_1_dir/sub_3_dir", paths);
            expect(paths).toEqual(["root_dir/sub_1_dir/sub_3_dir/file_5"]);
        });
        it("returns files recursively under a given dir_path", async () => {
            const paths: string[] = [];
            crawlRec("root_dir", paths);
            // Depth-first order: each subdirectory is fully expanded at the
            // point it is encountered in the parent's listing.
            expect(paths).toEqual([
                "root_dir/sub_1_dir/file_3",
                "root_dir/sub_1_dir/sub_3_dir/file_5",
                "root_dir/sub_1_dir/file_4",
                "root_dir/file_1",
                "root_dir/file_2"
            ]);
        });
        it("return empty file list if dir_path does not exist", async () => {
            const paths: string[] = [];
            crawlRec("404", paths);
            expect(paths).toEqual([]);
        });
    });
});