|
| 1 | +const fs = require("fs"); |
| 2 | +const path = require("path"); |
| 3 | +const glob = require("glob"); |
| 4 | +const process = require("process"); |
| 5 | + |
/**
 * Locate the first project folder under `targetDirectoryName` (relative to
 * the current working directory) and derive its "routes" directory path.
 *
 * Folders ending in ".egg-info" (Python packaging artifacts) are skipped.
 *
 * @param {string} targetDirectoryName - Directory to scan, e.g. "src/".
 * @returns {[string, string]} `[<project>/routes absolute path, project folder name]`.
 * @throws {Error} If the target directory contains no eligible project folder.
 */
const findDirectoryPath = (targetDirectoryName) => {
  const pathToCheck = path.join(process.cwd(), targetDirectoryName);
  const folders = fs
    .readdirSync(pathToCheck, { withFileTypes: true })
    .filter(
      (entry) => entry.isDirectory() && !entry.name.endsWith(".egg-info")
    )
    .map((entry) => ({
      name: entry.name,
      path: path.join(pathToCheck, entry.name),
    }));
  // Guard: without this, an empty src/ crashed later with an opaque
  // "Cannot read properties of undefined" TypeError on folders[0].
  if (folders.length === 0) {
    throw new Error(`No project folder found under ${pathToCheck}`);
  }
  // NOTE(review): only the first folder (readdir order) is used — confirm
  // this is intended when several projects coexist under src/.
  const routesDirectory = path.join(folders[0].path, "routes");
  return [routesDirectory, folders[0].name];
};
| 20 | + |
// Resolve the project's "routes" directory and the project folder name once,
// at module load; both are consumed by the final write at the bottom of the file.
const [directoryPath, project_name] = findDirectoryPath("src/");

// All collected schemas are written to ./schemas.json in the working directory.
const outputFile = path.join(process.cwd(), "schemas.json");
| 24 | + |
/**
 * Recursively walk `directoryPath` and collect every JSON schema found in
 * "schemas" folders, keyed by file name (without extension). Any other
 * sub-folder becomes a nested object mirroring the directory tree.
 *
 * Each schema's `$id` is set to `project_name + folder_path + route`, where
 * `route` is read from the schema file itself. Unreadable/unparsable files
 * are logged and skipped (best-effort, matching the original behavior).
 *
 * @param {string} directoryPath - Directory to scan.
 * @param {string} folder_path - "/"-joined path accumulated during recursion ("" at the root).
 * @param {string} project_name - Project folder name, used as the `$id` prefix.
 * @returns {Object} Nested map: schema file name -> schema, folder name -> sub-map.
 */
function return_json_schema(directoryPath, folder_path, project_name) {
  const folders = fs
    .readdirSync(path.normalize(directoryPath), { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => ({
      name: entry.name,
      path: path.join(directoryPath, entry.name),
    }));
  const folders_schemas = {};
  folders.forEach((folder) => {
    if (folder.name === "schemas") {
      const jsonFiles = glob.sync(path.join(folder.path, "**/*.json"));
      jsonFiles.forEach((filePath) => {
        try {
          const fileContent = fs.readFileSync(filePath, "utf8");
          const jsonData = JSON.parse(fileContent);
          // Strip directory and extension: ".../foo/bar.json" -> "bar".
          const filename = filePath
            .replace(/^.*[\\/]/, "")
            .replace(/\.[^/.]+$/, "");
          // NOTE(review): if a schema file lacks a "route" key, the $id ends
          // in the string "undefined" — confirm all files declare "route".
          const route = jsonData["route"];
          jsonData["$id"] = project_name + folder_path + route;
          // Re-spread so "$id" serializes first, matching the original
          // output ordering. (The original reduce built a path.join()-based
          // $id that the trailing spread immediately overwrote — dead code,
          // removed here without changing the effective value.)
          folders_schemas[filename] = { $id: jsonData["$id"], ...jsonData };
        } catch (error) {
          console.error(
            `Erreur lors de la lecture du fichier ${filePath}:`,
            error
          );
        }
      });
    } else {
      const childPath = folder_path + "/" + folder.name;
      folders_schemas[folder.name] = return_json_schema(
        folder.path,
        childPath,
        project_name
      );
    }
  });
  return folders_schemas;
}
| 72 | + |
// Build the final document, { <project_name>: <nested schema tree> }, and
// write it as pretty-printed JSON. fs.writeFileSync truncates and overwrites
// any existing file, so the previous existsSync/unlinkSync pass (a
// check-then-act race) is unnecessary and has been dropped.
const finalJson = {
  [project_name]: return_json_schema(directoryPath, "", project_name),
};
fs.writeFileSync(outputFile, JSON.stringify(finalJson, null, 2));
0 commit comments