Commit 1922ae3

Commit message: import json

1 parent 6ab1ce7 · commit 1922ae3

25 files changed: +1259 −260 lines

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -5,3 +5,5 @@ venv/
 .vscode/
 __pycache__/
 data/
+/node_modules
+schemas.json

.pypirc

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+[distutils]
+index-servers = pypi
+
+[pypi]
+username = __token__
+password = <PyPI token>

function.py

Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+import os
+import json
+import jsonschema
+from jsonschema import validate
+
+
+def validate_schemas(params, schema):
+    try:
+        validate(instance=params, schema=schema)
+    except jsonschema.exceptions.ValidationError as e:
+        print(400, f"Validation error: {e.message}")

generate_schemas.js

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
+const fs = require("fs");
+const path = require("path");
+const glob = require("glob");
+const process = require("process");
+
+const findDirectoryPath = (targetDirectoryName) => {
+  const pathToCheck = path.join(process.cwd(), targetDirectoryName);
+  const folders = fs
+    .readdirSync(pathToCheck, { withFileTypes: true })
+    .filter(
+      (folder) => folder.isDirectory() && !folder.name.endsWith(".egg-info")
+    )
+    .map((folder) => ({
+      name: folder.name,
+      path: path.join(pathToCheck, folder.name),
+    }));
+  const routesDirectory = path.join(folders[0].path, "routes");
+  return [routesDirectory, folders[0].name];
+};
+
+const [directoryPath, project_name] = findDirectoryPath("src/");
+
+const outputFile = path.join(process.cwd(), "schemas.json");
+
+function return_json_schema(directoryPath, folder_path, project_name) {
+  const folders = fs
+    .readdirSync(path.normalize(directoryPath), { withFileTypes: true })
+    .filter((folder) => folder.isDirectory())
+    .map((folder) => ({
+      name: folder.name,
+      path: path.join(directoryPath, folder.name),
+    }));
+  var folders_schemas = {};
+  folders.forEach((folder) => {
+    if (folder.name == "schemas") {
+      const jsonFiles = glob.sync(path.join(folder.path, "**/*.json"));
+      var schemas = {};
+      jsonFiles.forEach((filePath) => {
+        try {
+          const fileContent = fs.readFileSync(filePath, "utf8");
+          var jsonData = JSON.parse(fileContent);
+          var filename = filePath
+            .replace(/^.*[\\/]/, "")
+            .replace(/\.[^/.]+$/, "");
+          var route = jsonData["route"];
+          jsonData["$id"] = project_name + folder_path + route;
+          schemas[filename] = jsonData;
+        } catch (error) {
+          console.error(
+            `Error while reading file ${filePath}:`,
+            error
+          );
+        }
+      });
+      folders_schemas = Object.keys(schemas).reduce((acc, key) => {
+        const currentSchema = schemas[key];
+        const modifiedSchema = {
+          $id: path.join(folder_path, currentSchema["$id"]),
+          ...currentSchema,
+        };
+        acc[key] = modifiedSchema;
+        return acc;
+      }, folders_schemas);
+    } else {
+      var new_folder_path = folder_path + "/" + folder.name;
+      var test = return_json_schema(folder.path, new_folder_path, project_name);
+      folders_schemas[folder.name] = test;
+    }
+  });
+  return folders_schemas;
+}
+
+if (fs.existsSync(outputFile)) {
+  fs.unlinkSync(outputFile);
+}
+
+const finalJson = {};
+finalJson[project_name] = return_json_schema(directoryPath, "", project_name);
+
+fs.writeFileSync(outputFile, JSON.stringify(finalJson, null, 2));
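Presumably run as node generate_schemas.js from the repository root, the script walks src/<project>/routes, gathers every schemas/**/*.json file (tagging each with an $id built from the project name, folder path, and its "route" field), and writes the nested result to schemas.json. The sketch below is a hedged example of consuming that output from Python together with validate_schemas; the "users" and "create_user" names are hypothetical and depend on the actual routes/ layout, not anything shown in this commit.

import json

from function import validate_schemas

# Load the schemas.json produced by the generator; the key names below
# ("users", "create_user") assume a layout such as
# src/<project>/routes/users/schemas/create_user.json.
with open("schemas.json", encoding="utf-8") as f:
    all_schemas = json.load(f)

project = next(iter(all_schemas))  # the single top-level key is the project name
schema = all_schemas[project]["users"]["create_user"]

validate_schemas({"name": "demo"}, schema)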
