Commit cd3b4a0

Merge pull request #12 from Geode-solutions/json_import_viewer: Json import viewer
2 parents 6ab1ce7 + ea38690, commit cd3b4a0

29 files changed: +1024 −45 lines

.dockerignore

Lines changed: 0 additions & 4 deletions
This file was deleted.

.github/workflows/CD.yml

Lines changed: 54 additions & 0 deletions (new file)

```yaml
name: CD

on:
  push:
    branches: [master, next, json_import_viewer]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.TOKEN }}
      - name: Python Semantic Release
        uses: python-semantic-release/python-semantic-release@master
        id: semantic-release
        with:
          github_token: ${{ secrets.TOKEN }}
      - name: Build
        run: |
          python3 -m pip install --upgrade build
          python3 -m build
      - name: Upload PYPI
        if: steps.semantic-release.outputs.released == 'true'
        run: |
          python3 -m pip install twine
          python3 -m twine upload --repository pypi dist/* -u __token__ -p ${{ secrets.PYPI_TOKEN }}
      - name: Setup NODE
        uses: actions/setup-node@v3
        with:
          registry-url: "https://registry.npmjs.org"
          node-version: "20.x"
      - name: Upload NPM
        if: steps.semantic-release.outputs.released == 'true'
        run: |
          pwd
          cd ${{ github.workspace }}
          npm i
          npm run json
          jq '.version="${{steps.semantic-release.outputs.version}}"' package.json > temp && mv temp package.json
          cat package.json
          npm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Merge master -> next
        if: github.ref == 'refs/heads/master'
        uses: devmasx/merge-branch@master
        with:
          type: now
          from_branch: master
          target_branch: next
          github_token: ${{ github.token }}
```
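The jq line in the Upload NPM step stamps the semantic-release version into package.json before publishing, so the npm package always carries the version just computed for the release. A minimal Node sketch of the same transformation, where RELEASE_VERSION is a hypothetical stand-in for the workflow's steps.semantic-release.outputs.version:

```js
// Hypothetical sketch of the workflow's jq version-injection step.
// RELEASE_VERSION stands in for ${{ steps.semantic-release.outputs.version }}.
const fs = require("fs");

const pkg = JSON.parse(fs.readFileSync("package.json", "utf8"));
pkg.version = process.env.RELEASE_VERSION; // e.g. "1.2.3"
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 2) + "\n");
```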

.github/workflows/docker-images.yml

Lines changed: 0 additions & 13 deletions
This file was deleted.

.gitignore

Lines changed: 2 additions & 0 deletions

```diff
@@ -5,3 +5,5 @@ venv/
 .vscode/
 __pycache__/
 data/
+/node_modules
+schemas.json
```

.pypirc

Lines changed: 6 additions & 0 deletions (new file)

```ini
[distutils]
index-servers = pypi

[pypi]
username = __token__
password = <PyPI token>
```
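Note: twine only reads ~/.pypirc by default, so a repository-local file like this one has to be passed explicitly (twine accepts a --config-file option); the CD workflow above sidesteps the file entirely by passing -u/-p on the command line. The `<PyPI token>` placeholder must be replaced with a real API token before the file is usable.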

Dockerfile

Lines changed: 0 additions & 11 deletions
This file was deleted.

README.md

Lines changed: 1 addition & 0 deletions

```diff
@@ -1,2 +1,3 @@
 # OpenGeodeWeb-Viewer
+
 OpenSource Python framework for remote visualisation
```

generate_schemas.js

Lines changed: 81 additions & 0 deletions (new file)

```js
const fs = require("fs");
const path = require("path");
const glob = require("glob");
const process = require("process");

// Locate the single project package under src/ (ignoring *.egg-info build
// artifacts) and return its rpc/ directory together with the project name.
const findDirectoryPath = (targetDirectoryName) => {
  const pathToCheck = path.join(process.cwd(), targetDirectoryName);
  const folders = fs
    .readdirSync(pathToCheck, { withFileTypes: true })
    .filter(
      (folder) => folder.isDirectory() && !folder.name.endsWith(".egg-info")
    )
    .map((folder) => ({
      name: folder.name,
      path: path.join(pathToCheck, folder.name),
    }));
  const rpcDirectory = path.join(folders[0].path, "rpc");
  return [rpcDirectory, folders[0].name];
};

const [directoryPath, project_name] = findDirectoryPath("src/");

const outputFile = path.join(process.cwd(), "schemas.json");

// Recursively walk the rpc/ tree: every "schemas" folder contributes its
// *.json files keyed by file name; any other folder nests one level deeper.
function return_json_schema(directoryPath, folder_path, project_name) {
  const folders = fs
    .readdirSync(path.normalize(directoryPath), { withFileTypes: true })
    .filter((folder) => folder.isDirectory())
    .map((folder) => ({
      name: folder.name,
      path: path.join(directoryPath, folder.name),
    }));
  var folders_schemas = {};
  folders.forEach((folder) => {
    if (folder.name == "schemas") {
      const jsonFiles = glob.sync(path.join(folder.path, "**/*.json"));
      var schemas = {};
      jsonFiles.forEach((filePath) => {
        try {
          const fileContent = fs.readFileSync(filePath, "utf8");
          var jsonData = JSON.parse(fileContent);
          // Strip the directory and the extension to get the schema name.
          var filename = filePath
            .replace(/^.*[\\/]/, "")
            .replace(/\.[^/.]+$/, "");
          var rpc = jsonData["rpc"];
          jsonData["$id"] = project_name + folder_path + "." + rpc;
          schemas[filename] = jsonData;
        } catch (error) {
          console.error(`Error while reading file ${filePath}:`, error);
        }
      });
      folders_schemas = Object.keys(schemas).reduce((acc, key) => {
        const currentSchema = schemas[key];
        const modifiedSchema = {
          $id: path.join(folder_path, currentSchema["$id"]),
          ...currentSchema,
        };
        acc[key] = modifiedSchema;
        return acc;
      }, folders_schemas);
    } else {
      var new_folder_path = folder_path + "/" + folder.name;
      folders_schemas[folder.name] = return_json_schema(
        folder.path,
        new_folder_path,
        project_name
      );
    }
  });
  return folders_schemas;
}

// Regenerate schemas.json from scratch on every run.
if (fs.existsSync(outputFile)) {
  fs.unlinkSync(outputFile);
}

const finalJson = {};
finalJson[project_name] = return_json_schema(directoryPath, "", project_name);

fs.writeFileSync(outputFile, JSON.stringify(finalJson, null, 2));
```
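The npm run json step in the CD workflow presumably invokes this generator; the resulting schemas.json nests the schemas as { <project_name>: { <folder>: { <schema_name>: {...} } } }. A minimal consumption sketch, assuming the bundled entries are standard JSON Schema documents and using the ajv package; neither ajv nor the "some_folder"/"some_schema" entry names are part of this commit:

```js
// Hypothetical usage sketch: load the generated bundle and validate a payload.
// strict mode is disabled so ajv tolerates the non-standard "rpc" keyword.
const Ajv = require("ajv");
const schemas = require("./schemas.json");

const projectName = Object.keys(schemas)[0];
console.log("Schema tree root:", projectName);

const ajv = new Ajv({ strict: false });
// "some_folder" and "some_schema" are placeholders for real entries.
const entry = schemas[projectName]?.some_folder?.some_schema;
if (entry) {
  const validate = ajv.compile(entry);
  console.log(validate({ example: "payload" }) ? "valid" : validate.errors);
}
```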
