Skip to content

Json import viewer #12

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 13 commits into from
Mar 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions .dockerignore

This file was deleted.

54 changes: 54 additions & 0 deletions .github/workflows/CD.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
name: CD

on:
  push:
    branches: [master, next, json_import_viewer]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history so semantic-release can inspect every commit.
          fetch-depth: 0
          token: ${{ secrets.TOKEN }}
      - name: Python Semantic Release
        uses: python-semantic-release/python-semantic-release@master
        id: semantic-release
        with:
          github_token: ${{ secrets.TOKEN }}
      - name: Build
        run: |
          python3 -m pip install --upgrade build
          python3 -m build
      - name: Upload PYPI
        # Only publish when semantic-release actually cut a new version.
        if: steps.semantic-release.outputs.released == 'true'
        run: |
          python3 -m pip install twine
          python3 -m twine upload --repository pypi dist/* -u __token__ -p ${{ secrets.PYPI_TOKEN }}
      - name: Setup NODE
        uses: actions/setup-node@v3
        with:
          registry-url: "https://registry.npmjs.org"
          node-version: "20.x"
      - name: Upload NPM
        if: steps.semantic-release.outputs.released == 'true'
        run: |
          pwd
          cd ${{ github.workspace }}
          npm i
          npm run json
          # Stamp the released version into package.json before publishing.
          jq '.version="${{steps.semantic-release.outputs.version}}"' package.json > temp && mv temp package.json
          cat package.json
          npm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Merge master -> next
        if: github.ref == 'refs/heads/master'
        uses: devmasx/merge-branch@master
        with:
          type: now
          from_branch: master
          target_branch: next
          github_token: ${{ github.token }}
13 changes: 0 additions & 13 deletions .github/workflows/docker-images.yml

This file was deleted.

2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,5 @@ venv/
.vscode/
__pycache__/
data/
/node_modules
schemas.json
6 changes: 6 additions & 0 deletions .pypirc
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[distutils]
index-servers = pypi

[pypi]
username = __token__
password = <PyPI token>
11 changes: 0 additions & 11 deletions Dockerfile

This file was deleted.

1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
# OpenGeodeWeb-Viewer

Open-source Python framework for remote visualisation
81 changes: 81 additions & 0 deletions generate_schemas.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
const fs = require("fs");
const path = require("path");
const glob = require("glob");
const process = require("process");
const { log } = require("console");

/**
 * Locate the single package directory under `targetDirectoryName` (relative to
 * the current working directory), ignoring *.egg-info build artifacts, and
 * return the path of its "rpc" sub-directory together with the package name.
 *
 * @param {string} targetDirectoryName - directory relative to cwd, e.g. "src/".
 * @returns {[string, string]} [absolute path to `<package>/rpc`, package name].
 * @throws {Error} when `targetDirectoryName` contains no package directory.
 */
const findDirectoryPath = (targetDirectoryName) => {
  const pathToCheck = path.join(process.cwd(), targetDirectoryName);
  const folders = fs
    .readdirSync(pathToCheck, { withFileTypes: true })
    .filter(
      (folder) => folder.isDirectory() && !folder.name.endsWith(".egg-info")
    )
    .map((folder) => ({
      name: folder.name,
      path: path.join(pathToCheck, folder.name),
    }));
  // Fail with a clear message instead of a TypeError on folders[0] when the
  // directory holds no candidate package.
  if (folders.length === 0) {
    throw new Error(`No package directory found under ${pathToCheck}`);
  }
  const rpcDirectory = path.join(folders[0].path, "rpc");
  return [rpcDirectory, folders[0].name];
};

// Resolve the package's rpc directory and the package name from src/.
const [directoryPath, project_name] = findDirectoryPath("src/");

// Aggregated schema index written at the repository root (git-ignored, see .gitignore).
const outputFile = path.join(process.cwd(), "schemas.json");

/**
 * Recursively walk `directoryPath`, collecting every *.json file found under
 * "schemas" folders into a nested object that mirrors the folder tree.
 *
 * @param {string} directoryPath - directory to scan at this level.
 * @param {string} folder_path - "/"-joined path accumulated from the rpc root.
 * @param {string} project_name - package name used to build each schema's $id.
 * @returns {Object} nested mapping; leaves are parsed schemas keyed by file name.
 */
function return_json_schema(directoryPath, folder_path, project_name) {
  // Sub-directories of the current level only.
  const folders = fs
    .readdirSync(path.normalize(directoryPath), { withFileTypes: true })
    .filter((folder) => folder.isDirectory())
    .map((folder) => ({
      name: folder.name,
      path: path.join(directoryPath, folder.name),
    }));
  var folders_schemas = {};
  folders.forEach((folder) => {
    if (folder.name == "schemas") {
      // A "schemas" folder: gather every .json file beneath it, recursively.
      const jsonFiles = glob.sync(path.join(folder.path, "**/*.json"));
      var schemas = {};
      jsonFiles.forEach((filePath) => {
        try {
          const fileContent = fs.readFileSync(filePath, "utf8");
          var jsonData = JSON.parse(fileContent);
          // Base file name without directory and extension; used as the key.
          var filename = filePath
            .replace(/^.*[\\/]/, "")
            .replace(/\.[^/.]+$/, "");
          var rpc = jsonData["rpc"];
          // Dotted $id: "<package><folder_path>.<rpc>".
          jsonData["$id"] = project_name + folder_path + "." + rpc;
          schemas[filename] = jsonData;
        } catch (error) {
          // Unreadable or malformed schema files are reported and skipped.
          console.error(
            `Erreur lors de la lecture du fichier ${filePath}:`,
            error
          );
        }
      });
      // Merge this folder's schemas into the accumulator.
      // NOTE(review): the spread comes AFTER the $id property, so the
      // path.join-based $id computed here is immediately overwritten by
      // currentSchema's own $id (assigned above) — this line appears to be
      // dead. Confirm which $id format is intended.
      folders_schemas = Object.keys(schemas).reduce((acc, key) => {
        const currentSchema = schemas[key];
        const modifiedSchema = {
          $id: path.join(folder_path, currentSchema["$id"]),
          ...currentSchema,
        };
        acc[key] = modifiedSchema;
        return acc;
      }, folders_schemas);
    } else {
      // Any other folder: recurse one level deeper, extending the path.
      var new_folder_path = folder_path + "/" + folder.name;
      var test = return_json_schema(folder.path, new_folder_path, project_name);
      folders_schemas[folder.name] = test;
    }
  });
  return folders_schemas;
}

// Remove any stale output first (writeFileSync would truncate anyway; the
// explicit unlink is belt-and-braces).
if (fs.existsSync(outputFile)) {
  fs.unlinkSync(outputFile);
}

// Root object shape: { <project_name>: <nested schema tree> }.
const finalJson = {};
finalJson[project_name] = return_json_schema(directoryPath, "", project_name);

// Pretty-print with 2-space indentation for readable diffs.
fs.writeFileSync(outputFile, JSON.stringify(finalJson, null, 2));
Loading