Initial YAML configuration support #334

Open: wants to merge 1 commit into base: main
45 changes: 45 additions & 0 deletions sites/NOAA_GFDL/default_gfdl.yml
@@ -0,0 +1,45 @@
case_list:
  - CASENAME: ESM4_historical
    model: GFDL-ESM4
    convention: CMIP
    FIRSTYR: 2000
    LASTYR: 2004
    CASE_ROOT_DIR: "/archive/Thomas.Jackson/ESM4/DECK/ESM4_historical_D1/gfdl.ncrc4-intel16-prod-openmp/pp/"
    pod_list:
      #- Wheeler_Kiladis
      - EOF_500hPa
      #- MJO_suite
      #- MJO_teleconnection
      #- precip_diurnal_cycle
      #- ENSO_RWS
      #- SM_ET_coupling
      #- convective_transition_diag
      #- MJO_prop_amp
      #- ENSO_MSE
      #- temp_extremes_distshape
      #- precip_buoy_diag
      #- eulerian_storm_track
MODEL_DATA_ROOT: "$MDTF_TMPDIR/inputdata/model"
OBS_DATA_REMOTE: "/home/oar.gfdl.mdtf/mdtf/inputdata/obs_data"
OBS_DATA_ROOT: "$MDTF_TMPDIR/inputdata/obs_data"
WORKING_DIR: "$MDTF_TMPDIR/wkdir"
OUTPUT_DIR: "$MDTF_TMPDIR/mdtf_out"
GFDL_PPAN_TEMP: "$TMPDIR"
GFDL_WS_TEMP: "/net2/$USER/tmp"
frepp: false
ignore_component: false
data_manager: GFDL_PP
file_transfer_timeout: 900
keep_temp: false
environment_manager: conda
conda_root: "/home/oar.gfdl.mdtf/miniconda3"
conda_env_root: "/home/oar.gfdl.mdtf/miniconda3/envs"
venv_root: "./envs/venv"
r_lib_root: "./envs/r_libs"
save_ps: false
save_nc: false
make_variab_tar: false
overwrite: false
verbose: 1
test_mode: false
dry_run: false
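
The file above is plain YAML, so it can be inspected with PyYAML before the framework ever sees it. The short sketch below is illustrative only; the file path and the printed fields are assumptions, not part of this diff.

# Sketch: load a config like default_gfdl.yml and inspect it with PyYAML.
import yaml  # PyYAML, the same dependency the filesystem.py change adds

# Path assumed for illustration; adjust to your checkout.
with open("sites/NOAA_GFDL/default_gfdl.yml") as f:
    cfg = yaml.safe_load(f)  # plain dict; no '//' comment stripping needed

for case in cfg.get("case_list", []):  # same key cli.py pops off below
    print(case["CASENAME"], case["FIRSTYR"], case["LASTYR"])
    print(case["pod_list"])  # ['EOF_500hPa']; '#'-commented PODs are dropped

print(cfg["data_manager"], cfg["environment_manager"])  # GFDL_PP conda

Note that values such as "$MDTF_TMPDIR/inputdata/model" come back verbatim; any environment-variable expansion has to happen in the framework, not in the YAML loader.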
8 changes: 4 additions & 4 deletions src/cli.py
@@ -1095,11 +1095,11 @@ def init_user_defaults(self):
             raise ValueError()
         if not str_:
             return
-        # try to determine if file is json
-        if 'json' in os.path.splitext(path)[1].lower():
-            # assume config_file a JSON dict of option:value pairs.
+        # try to determine if file is json or yml
+        if any(x in os.path.splitext(path)[1].lower() for x in ["json", "yml"]):
+            # assume config_file a JSON or YAML dict of option:value pairs.
             try:
-                d = util.parse_json(str_)
+                d = util.parse_serialization_stream(str_)
                 self.file_case_list = d.pop('case_list', [])
                 d = {canonical_arg_name(k): v for k,v in d.items()}
                 config.user_defaults.update(d)
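
The new extension test is a substring match on the file suffix, so it accepts .json, .jsonc and .yml, but as written a .yaml suffix does not match. A standalone sketch of the same check, with invented file names:

# Sketch of the suffix check used in init_user_defaults, on invented names.
import os

def looks_like_config(path):
    return any(x in os.path.splitext(path)[1].lower() for x in ["json", "yml"])

for p in ["config.json", "config.jsonc", "default_gfdl.yml", "default_gfdl.yaml"]:
    print(p, looks_like_config(p))
# config.json True, config.jsonc True, default_gfdl.yml True, default_gfdl.yaml False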
2 changes: 1 addition & 1 deletion src/util/__init__.py
@@ -21,7 +21,7 @@
 from .filesystem import (
     abbreviate_path, resolve_path, recursive_copy,
     check_executable, find_files, check_dir, bump_version, strip_comments,
-    parse_json, read_json, find_json, write_json, pretty_print_json,
+    parse_serialization_stream, read_json, find_json, write_json, pretty_print_json,
     append_html_template
     # is_subpath,
 )
2 changes: 1 addition & 1 deletion src/util/exceptions.py
@@ -300,7 +300,7 @@ def __str__(self):
 class PodConfigError(PodExceptionBase):
     """Exception raised if we can't parse info in a POD's settings.jsonc file.
     (Covers issues with the file format/schema; malformed JSONC will raise a
-    :py:class:`~json.JSONDecodeError` when :func:`~util.parse_json` attempts to
+    :py:class:`~json.JSONDecodeError` when :func:`~util._parse_json` attempts to
     parse the file.
     """
     _error_str = "Couldn't parse the settings.jsonc file"
26 changes: 22 additions & 4 deletions src/util/filesystem.py
@@ -10,6 +10,7 @@
 import re
 import shutil
 import string
+import yaml
 from . import basic
 from . import exceptions
 
@@ -306,7 +307,23 @@ def strip_comments(str_, delimiter=None):
     new_str = '\n'.join([s for s in lines if (s and not s.isspace())])
     return (new_str, line_nos)
 
-def parse_json(str_):
+
+def parse_serialization_stream(str_):
+    """Top-level wrapper to read either JSON or YAML input stream"""
+    try:
+        result = _parse_yaml(str_)
+    except Exception as _:
+        result = _parse_json(str_)
+
+    return result
+
+
+def _parse_yaml(str_):
+    """Parse YAML file and return as an OrderedDict"""
+    return collections.OrderedDict(yaml.safe_load(str_))
+
+
+def _parse_json(str_):
     """Parse JSONC (JSON with ``//``-comments) string *str\_* into a Python object.
     Comments are discarded. Wraps standard library :py:func:`json.loads`.
 
@@ -333,9 +350,10 @@ def _pos_from_lc(lineno, colno, str_):
         )
     except UnicodeDecodeError as exc:
         raise json.JSONDecodeError(
-            msg=f"parse_json received UnicodeDecodeError:\n{exc}",
+            msg=f"_parse_json received UnicodeDecodeError:\n{exc}",
             doc=strip_str, pos=0
         )
+
     return parsed_json
 
 def read_json(file_path, log=_log):
@@ -346,7 +364,7 @@ def read_json(file_path, log=_log):
     *file_path*.
 
     Returns:
-        dict: data contained in the file, as parsed by :func:`parse_json`.
+        dict: data contained in the file, as parsed by :func:`_parse_json`.
 
     Execution exits with error code 1 on all other exceptions.
     """
@@ -360,7 +378,7 @@ def read_json(file_path, log=_log):
         # something more serious than missing file
         _log.critical("Caught exception when trying to read %s: %r", file_path, exc)
         exit(1)
-    return parse_json(str_)
+    return _parse_json(str_)
 
 def find_json(dir_, file_name, exit_if_missing=True, log=_log):
     """Reads a JSONC file *file_name* anywhere within the root directory *dir\_*.
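
parse_serialization_stream tries the YAML reader first and falls back to the JSONC reader if anything goes wrong. The sketch below mirrors that dispatch idea but is not the repository code: it catches only yaml.YAMLError and reduces JSONC comment handling to a naive regex, whereas the framework reuses strip_comments().

# Illustrative stand-in for the try-YAML-then-JSONC dispatch above.
import json
import re

import yaml

def parse_config_text(text):
    try:
        # Plain JSON is a YAML subset, so this also handles comment-free JSON.
        return yaml.safe_load(text)
    except yaml.YAMLError:
        # Naive JSONC fallback: strip '//' comments (ignores '//' inside strings).
        stripped = re.sub(r"//.*", "", text)
        return json.loads(stripped)

yml = "case_list:\n  - CASENAME: ESM4_historical\n    FIRSTYR: 2000"
jsonc = '{ "case_list": [ { "CASENAME": "ESM4_historical" } ] // comment\n }'

print(parse_config_text(yml)["case_list"][0]["CASENAME"])    # ESM4_historical
print(parse_config_text(jsonc)["case_list"][0]["CASENAME"])  # ESM4_historical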
16 changes: 8 additions & 8 deletions src/util/tests/test_filesystem.py
@@ -109,7 +109,7 @@ def test_bump_version_setver(self, mock_exists):
         # self.assertEqual(ver, f[2])
 
 class TestJSONC(unittest.TestCase):
-    def test_parse_json_basic(self):
+    def test_parse_serialization_stream(self):
         s = """{
             "a" : "test_string",
             "b" : 3,
@@ -121,7 +121,7 @@ def test_parse_json_basic(self):
             }
         }
         """
-        d = util.parse_json(s)
+        d = util.parse_serialization_stream(s)
         self.assertEqual(set(d.keys()), set(['a','b','c','d','e']))
         self.assertEqual(d['a'], "test_string")
         self.assertEqual(d['b'], 3)
@@ -149,7 +149,7 @@ def test_parse_json_comments(self):
         } // comment 7
 
         """
-        d = util.parse_json(s)
+        d = util.parse_serialization_stream(s)
         self.assertEqual(set(d.keys()), set(['a','b // c','e','f']))
         self.assertEqual(d['a'], 1)
         self.assertEqual(d['b // c'], "// d x ////")
@@ -160,7 +160,7 @@ def test_parse_json_syntax_lineno(self):
         s = 'SYNTAX_ERROR\n{"a": 1, "e": false}'
         try:
             flag = False
-            _ = util.parse_json(textwrap.dedent(s))
+            _ = util.parse_serialization_stream(textwrap.dedent(s))
         except json.JSONDecodeError as exc:
             flag = True
             self.assertEqual(exc.lineno, 1)
@@ -171,7 +171,7 @@ def test_parse_json_syntax_lineno(self):
         # missing ',' triggers on first " in "e"
         try:
             flag = False
-            _ = util.parse_json(textwrap.dedent(s))
+            _ = util.parse_serialization_stream(textwrap.dedent(s))
         except json.JSONDecodeError as exc:
             flag = True
             self.assertEqual(exc.lineno, 1)
@@ -182,7 +182,7 @@ def test_parse_json_syntax_lineno(self):
         # missing ',' triggers on first " in "e"
         try:
             flag = False
-            _ = util.parse_json(textwrap.dedent(s))
+            _ = util.parse_serialization_stream(textwrap.dedent(s))
         except json.JSONDecodeError as exc:
             flag = True
             self.assertEqual(exc.lineno, 2)
@@ -201,7 +201,7 @@ def test_parse_json_syntax_lineno(self):
         """
         try:
             flag = False
-            _ = util.parse_json(textwrap.dedent(s))
+            _ = util.parse_serialization_stream(textwrap.dedent(s))
         except json.JSONDecodeError as exc:
             flag = True
             self.assertEqual(exc.lineno, 9)
@@ -219,7 +219,7 @@ def test_parse_json_syntax_lineno(self):
         # missing ',' triggers on first " in "e"
         try:
             flag = False
-            _ = util.parse_json(textwrap.dedent(s))
+            _ = util.parse_serialization_stream(textwrap.dedent(s))
         except json.JSONDecodeError as exc:
             flag = True
             self.assertEqual(exc.lineno, 6)
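
The updated tests still only push JSON/JSONC strings through the renamed wrapper; the YAML path itself is not exercised here. A sketch of a companion test that could sit alongside TestJSONC, assuming the same util import the existing tests rely on:

# Sketch of a YAML-path test; the util import is assumed to match whatever
# this test module already uses for the JSONC tests above.
import unittest

from src import util  # assumption: same import path as the existing tests

class TestYAMLConfig(unittest.TestCase):
    def test_parse_serialization_stream_yaml(self):
        s = """
        case_list:
          - CASENAME: ESM4_historical
            FIRSTYR: 2000
            LASTYR: 2004
        verbose: 1
        """
        d = util.parse_serialization_stream(s)
        self.assertEqual(set(d.keys()), set(['case_list', 'verbose']))
        self.assertEqual(d['case_list'][0]['CASENAME'], 'ESM4_historical')
        self.assertEqual(d['verbose'], 1)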