
Commit

generate_json script
unkcpz committed Aug 31, 2023
1 parent 80d6dbe commit c90de49
Showing 3 changed files with 147 additions and 0 deletions.
7 changes: 7 additions & 0 deletions README.md
@@ -74,3 +74,10 @@ We highly appreciate help in keeping the configurations up to date and adding ne
1. Fork this repository
2. Add your computer / code
3. Create a Pull Request

To test whether your configuration and file/folder structure are correct, you can generate the JSON files locally:

```bash
pip install -r scripts/requirements.txt
python scripts/generate_json.py
```
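
For reference, the script assumes roughly the following layout (a sketch inferred from `scripts/generate_json.py`; the `computer-setup`/`computer-configure` file names follow the convention used elsewhere in the script, and everything in angle brackets is a placeholder):

```
<domain>/                   # one folder per domain, e.g. a computing centre
├── default -> <computer>   # symlink pointing at the default computer
└── <computer>/
    ├── computer-setup.yml
    ├── computer-configure.yml
    └── codes/
        └── <code>.yml
```

The generated JSON ends up in an `out/` folder at the repository root.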
140 changes: 140 additions & 0 deletions scripts/generate_json.py
@@ -0,0 +1,140 @@
import copy
import json
import os
from pathlib import Path

import yaml

# Path to the repository root (this script lives in <root>/scripts/).
root_path = Path(__file__).resolve().parent.parent

# Folders to exclude from the parsing.
EXCLUDE_DOMAIN_FOLDER_LIST = ['scripts', '.github', '.git']
EXCLUDE_COMPUTER_FOLDER_LIST = ['default', 'codes']

# Expected suffix of the configuration files (assumption: the repository uses '.yml').
YAML_SUFFIX = '.yml'


def main():
    # Extract all the data: collect every domain and its computers/codes
    # into a single nested dictionary.
    data = dict()

    for domain_path in root_path.iterdir():
        domain = domain_path.name

        if domain in EXCLUDE_DOMAIN_FOLDER_LIST or not domain_path.is_dir():
            # Skip the excluded folders and any top-level files (e.g. README.md).
            continue

        domain_data = dict()
        for computer_path in domain_path.iterdir():
            computer = computer_path.name

            if computer in EXCLUDE_COMPUTER_FOLDER_LIST or not computer_path.is_dir():
                # Skip the excluded folders and any stray files.
                continue

            domain_data[computer] = dict()
            # The 'codes' subfolder is parsed separately below, so exclude it here.
            domain_data[computer]["computer"] = parse_config(computer_path, exclude=["codes"])
            domain_data[computer]["codes"] = parse_config(computer_path / "codes")

        # Extract the default computer from the 'default' symlink, if present.
        link = domain_path / 'default'
        domain_data['default'] = link.resolve().name if link.is_symlink() else None

        data[domain] = domain_data

    # Store the extracted information as a single JSON file in <root>/out/.
    out_folder = root_path / 'out'
    out_folder.mkdir(exist_ok=True)
    with open(out_folder / 'database.json', 'w') as filep:
        json.dump(data, filep, indent=4)

def parse_config(folder_path, exclude=None):
    """Parse every YAML file in ``folder_path`` into a dict keyed by the file name without its suffix."""
    if not os.path.isdir(folder_path):
        raise ValueError(f"{folder_path} must point to an existing folder.")

    result = {}

    if exclude:
        file_list = [fname for fname in os.listdir(folder_path) if fname not in exclude]
    else:
        file_list = os.listdir(folder_path)

    for fname in file_list:
        if not fname.endswith(YAML_SUFFIX):
            raise ValueError(f"The file {fname} has an unsupported extension. Please use '{YAML_SUFFIX}'.")

        with open(folder_path / fname) as yaml_file:
            # e.g. 'computer-setup.yml' is stored under the key 'computer-setup'.
            result[fname[:-len(YAML_SUFFIX)]] = yaml.load(yaml_file, Loader=yaml.FullLoader)

    return result



## Prepare the config db for aiida 2.x data type entry points compatibility
#def update_to_v2_entry_points(comp_setup: dict) -> dict:
#    """
#    v1 -> v2: prepend `core.` to the transport and scheduler entry points.
#    Returns a new dictionary; the argument `comp_setup` is not modified.
#    """
#    new_comp_setup = {}
#    for key, value in comp_setup.items():
#        if key in ['transport', 'scheduler']:
#            new_comp_setup[key] = f"core.{value}"
#        else:
#            new_comp_setup[key] = value
#
#    return new_comp_setup
#
#
#final_dict_v2 = copy.deepcopy(final_dict)
#
## Loop over all the fields and update them to be compatible with the aiida 2.x entry point names
#for domain in final_dict_v2:
#    for computer in final_dict_v2[domain]:
#        if computer != 'default':
#            final_dict_v2[domain][computer]["computer-setup"] = update_to_v2_entry_points(final_dict_v2[domain][computer]["computer-setup"])
#
## Prepare the config db for aiida 2.1 data type entry points compatibility
#def update_to_v2_1_entry_points(code_setup: dict) -> dict:
#    """
#    v2 -> v2.1:
#    convert an orm.Code setup to an orm.InstalledCode setup.
#    """
#    # New parameters of the InstalledCode setup
#    new_code_setup = {}
#    for key, value in code_setup.items():
#        if key == "input_plugin":
#            new_code_setup["default_calc_job_plugin"] = value
#        elif key == "on_computer":
#            continue
#        elif key == "remote_abs_path":
#            new_code_setup["filepath_executable"] = value
#        else:
#            new_code_setup[key] = value
#
#    return new_code_setup
#
#final_dict_v2_1 = copy.deepcopy(final_dict_v2)
#
## Loop over all the fields and update them to be compatible with the aiida 2.1 entry point names
#for domain in final_dict_v2_1:
#    for computer in final_dict_v2_1[domain]:
#        if computer != 'default':
#            for key in final_dict_v2_1[domain][computer]:
#                if key not in ['computer-setup', 'computer-configure']:
#                    final_dict_v2_1[domain][computer][key] = update_to_v2_1_entry_points(final_dict_v2_1[domain][computer][key])
#
## Store the extracted information as a single JSON file.
#os.mkdir(folder_path/'out')
#with open(folder_path/'out/database.json', 'w') as filep:
#    json.dump(final_dict, filep, indent=4)
#
## Store the v2 compatible entry points
#with open(folder_path/'out/database_v2.json', 'w') as filep:
#    json.dump(final_dict_v2, filep, indent=4)
#
## Store the v2.1 compatible code data
#with open(folder_path/'out/database_v2_1.json', 'w') as filep:
#    json.dump(final_dict_v2_1, filep, indent=4)
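
# Illustrative example (hypothetical values, not part of the database): once the
# converters above are re-enabled, a v1 code entry such as
#     {"input_plugin": "quantumespresso.pw", "on_computer": True,
#      "remote_abs_path": "/usr/bin/pw.x"}
# would be rewritten for aiida 2.1 as
#     {"default_calc_job_plugin": "quantumespresso.pw",
#      "filepath_executable": "/usr/bin/pw.x"}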

if "__main__" == __name__:
main()
Empty file added scripts/requirements.txt
Empty file.
