From eec3239eecb510130ba520ec1c7c655b30f81fca Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 00:11:19 -0400 Subject: [PATCH 01/61] #141 Add fremake model yaml info to standard model yaml created (for pp) --- fre/make/tests/AM5_example/am5.yaml | 132 ++++++++++++++++++++++++++-- 1 file changed, 126 insertions(+), 6 deletions(-) diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml index 4b7bf8d4..bed0b733 100644 --- a/fre/make/tests/AM5_example/am5.yaml +++ b/fre/make/tests/AM5_example/am5.yaml @@ -1,6 +1,126 @@ -platformYaml: platforms.yaml -compileYaml: compile.yaml -release: f1a1r1 -INTEL: "intel-classic" -FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + - &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "1yr" + - &PP_AMIP_CHUNK384 "1yr" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - &PP_CPLD_CHUNK_A "5yr" + - &PP_CPLD_CHUNK_B "20yr" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +shared: + # compile information + compile: + compileYaml: &compile_yaml "compile.yaml" + platformYaml: "platforms.yaml" + + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - "yaml_include/pp.c96_amip.yaml" + compile: + - *compile_yaml + - name: "c96L65_am5f7b12r1_pdclim1850F" + pp: + - "yaml_include/pp.c96_clim.yaml" + compile: + - *compile_yaml + - name: "c96L65_am5f7b12r1_pdclim2010F" + pp: + - "yaml_include/pp.c96_clim.yaml" + compile: + - *compile_yaml + - name: "c96L65_am5f7b12r1_pdclim2010AERF" + pp: + - "yaml_include/pp.c96_clim.yaml" + compile: + - *compile_yaml + - name: "c384L65_am5f7b12r1_amip" + pp: + - "yaml_include/pp.c384_amip.yaml" + compile: + - *compile_yaml + - name: "c384L65_am5f7b12r1_pdclim2010F" + pp: + - "yaml_include/pp.c384_clim.yaml" + compile: + - *compile_yaml + - name: "c384L65_am5f7b12r1_pdclim1850F" + pp: + - "yaml_include/pp.c384_clim.yaml" + compile: + - *compile_yaml + - name: "c384L65_am5f7b12r1_pdclim2010AERF" + pp: + - "yaml_include/pp.c384_clim.yaml" + compile: + - *compile_yaml 
+ - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" + pp: + - "yaml_include/pp.c384_amip.yaml" + - "yaml_include/pp.om4.yaml" + compile: + - *compile_yaml + - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" + pp: + - "yaml_include/pp.c96_amip.yaml" + - "yaml_include/pp.om4.yaml" + compile: + - *compile_yaml + - name: "c96L65_am5f7b12r1_amip_cosp" + pp: + - "yaml_include/pp.c96_amip.yaml" + compile: + - *compile_yaml From b7df975424c25a9a70895e5b248515e3e7dd5cde Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 00:12:25 -0400 Subject: [PATCH 02/61] #141 Put compile info under `compile` section - this is for later organization and parsing when yamls are combined --- fre/make/tests/AM5_example/compile.yaml | 133 ++++++++++++------------ 1 file changed, 67 insertions(+), 66 deletions(-) diff --git a/fre/make/tests/AM5_example/compile.yaml b/fre/make/tests/AM5_example/compile.yaml index 5200599c..5f9a361b 100644 --- a/fre/make/tests/AM5_example/compile.yaml +++ b/fre/make/tests/AM5_example/compile.yaml @@ -1,66 +1,67 @@ -experiment: "am5" -container_addlibs: -baremetal_linkerflags: -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" - branch: "2022.01" - cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" - otherFlags: "$(FMSincludes)" - - component: "am5_phys" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git" - branch: "2022.01" - otherFlags: "$(FMSincludes)" - - component: "GFDL_atmos_cubed_sphere" - requires: ["FMS", "am5_phys"] - repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" - branch: "2022.01" - paths: ["GFDL_atmos_cubed_sphere/driver/GFDL", - "GFDL_atmos_cubed_sphere/model", - "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90", - "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90", - "GFDL_atmos_cubed_sphere/tools", - "GFDL_atmos_cubed_sphere/GFDL_tools"] - otherFlags: "$(FMSincludes)" - - component: "atmos_drivers" - requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"] - repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE" - branch: "2022.01" - paths: ["atmos_drivers/coupled"] - otherFlags: "$(FMSincludes)" - - component: "ice_sis" - requires: ["FMS", "ice_param", "mom6"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git" - branch: "2021.02" - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "ice_param" - repo: "https://github.com/NOAA-GFDL/ice_param.git" - cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF" - branch: "2021.02" - requires: ["FMS", "mom6"] - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "land_lad2" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git" - branch: "2022.01" - branch: "land_lad2_2021.02" - doF90Cpp: True - cppdefs: "-DINTERNAL_FILE_NML" - otherFlags: "$(FMSincludes)" - - component: "mom6" - requires: ["FMS"] - paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"] - branch: ["2021.02","dev/gfdl/2018.04.06"] - repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"] - makeOverrides: 'OPENMP=""' - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "FMScoupler" - paths: ["FMScoupler/full", 
"FMScoupler/shared"] - repo: "https://github.com/NOAA-GFDL/FMScoupler.git" - branch: "2022.01" - requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"] - otherFlags: "$(FMSincludes) $(momIncludes)" +compile: + experiment: "am5" + container_addlibs: + baremetal_linkerflags: + src: + - component: "FMS" + repo: "https://github.com/NOAA-GFDL/FMS.git" + cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" + branch: "2022.01" + cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" + otherFlags: *FMSincludes + - component: "am5_phys" + requires: ["FMS"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git" + branch: "2022.01" + otherFlags: *FMSincludes + - component: "GFDL_atmos_cubed_sphere" + requires: ["FMS", "am5_phys"] + repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" + cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" + branch: "2022.01" + paths: ["GFDL_atmos_cubed_sphere/driver/GFDL", + "GFDL_atmos_cubed_sphere/model", + "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90", + "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90", + "GFDL_atmos_cubed_sphere/tools", + "GFDL_atmos_cubed_sphere/GFDL_tools"] + otherFlags: *FMSincludes + - component: "atmos_drivers" + requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"] + repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" + cppdefs: "-DSPMD -DCLIMATE_NUDGE" + branch: "2022.01" + paths: ["atmos_drivers/coupled"] + otherFlags: *FMSincludes + - component: "ice_sis" + requires: ["FMS", "ice_param", "mom6"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git" + branch: "2021.02" + otherFlags: !join [*FMSincludes, " ", *momIncludes] + - component: "ice_param" + repo: "https://github.com/NOAA-GFDL/ice_param.git" + cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF" + branch: "2021.02" + requires: ["FMS", "mom6"] + otherFlags: !join [*FMSincludes," ", *momIncludes] + - component: "land_lad2" + requires: ["FMS"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git" + branch: "2022.01" + branch: "land_lad2_2021.02" + doF90Cpp: True + cppdefs: "-DINTERNAL_FILE_NML" + otherFlags: *FMSincludes + - component: "mom6" + requires: ["FMS"] + paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"] + branch: ["2021.02","dev/gfdl/2018.04.06"] + repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"] + makeOverrides: 'OPENMP=""' + otherFlags: !join [*FMSincludes, " ", *momIncludes] + - component: "FMScoupler" + paths: ["FMScoupler/full", "FMScoupler/shared"] + repo: "https://github.com/NOAA-GFDL/FMScoupler.git" + branch: "2022.01" + requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"] + otherFlags: !join [*FMSincludes, " ", *momIncludes] From c4248e381014678836a5208daaf26c689cff3f09 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 00:13:24 -0400 Subject: [PATCH 03/61] #141 Redefine `fre_properties` using reusable yaml variables --- fre/make/tests/AM5_example/platforms.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fre/make/tests/AM5_example/platforms.yaml b/fre/make/tests/AM5_example/platforms.yaml index 02b7d222..14d4dfff 100644 --- a/fre/make/tests/AM5_example/platforms.yaml +++ b/fre/make/tests/AM5_example/platforms.yaml @@ -2,18 +2,18 @@ platforms: - 
name: ncrc5.intel compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL,".mk"] modelRoot: ${HOME}/fremake_canopy/test - name: ncrc5.intel23 compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel From 035067412e7b32298d37ea985cf5cdc0ce98abcf Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 13:46:12 -0400 Subject: [PATCH 04/61] #141 Change `-e` option to `--execute` - another `-e` option was added to represent the experiment name --- fre/make/fremake.py | 47 ++++++++++++++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/fre/make/fremake.py b/fre/make/fremake.py index 10e437b7..fa875774 100644 --- a/fre/make/fremake.py +++ b/fre/make/fremake.py @@ -7,6 +7,7 @@ yamlfile_opt_help = """Experiment yaml compile FILE """ +experiment_opt_help = """Name of experiment""" platform_opt_help = """Hardware and software FRE platform space separated list of STRING(s). 
This sets platform-specific data and instructions """ @@ -39,6 +40,11 @@ def makeCli(): type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements +@click.option("-e", + "--experiment", + type = str, + help = experiment_opt_help, + required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -70,7 +76,7 @@ def makeCli(): is_flag = True, help = verbose_opt_help) @click.pass_context -def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def run_fremake(context, experiment, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): """ - Perform all fremake functions to run checkout and compile model""" context.forward(fremake_run) @@ -81,6 +87,11 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements +@click.option("-e", + "--experiment", + type = str, + help = experiment_opt_help, + required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -102,8 +113,7 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel "--no-parallel-checkout", is_flag = True, help = no_parallel_checkout_opt_help) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, default = False, help = "Use this to run the created checkout script.") @@ -112,7 +122,7 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel is_flag = True, help = verbose_opt_help) @click.pass_context -def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): +def create_checkout(context,yamlfile,experiment,platform,target,no_parallel_checkout,jobs,execute,verbose): """ - Write the checkout script """ context.forward(checkout_create) @@ -123,6 +133,11 @@ def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,e type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements +@click.option("-e", + "--experiment", + type = str, + help = experiment_opt_help, + required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -134,7 +149,7 @@ def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,e help = target_opt_help, required = True) @click.pass_context -def create_makefile(context,yamlfile,platform,target): +def create_makefile(context,experiment,yamlfile,platform,target): """ - Write the makefile """ context.forward(makefile_create) @@ -146,6 +161,11 @@ def create_makefile(context,yamlfile,platform,target): type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements +@click.option("-e", + "--experiment", + type = str, + help = experiment_opt_help, + required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -167,8 +187,7 @@ def create_makefile(context,yamlfile,platform,target): type = int, metavar = '', default = 1, help = parallel_opt_help) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, default = False, help = "Use this to run the created checkout script.") @@ -177,18 +196,21 @@ def create_makefile(context,yamlfile,platform,target): is_flag = 
True, help = verbose_opt_help) @click.pass_context -def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbose): +def create_compile(context,experiment,yamlfile,platform,target,jobs,parallel,execute,verbose): """ - Write the compile script """ context.forward(compile_create) - - @makeCli.command @click.option("-y", "--yamlfile", type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements +@click.option("-e", + "--experiment", + type = str, + help = experiment_opt_help, + required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -199,12 +221,11 @@ def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbos type = str, help = target_opt_help, required = True) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, help = "Build Dockerfile that has been generated by create-docker.") @click.pass_context -def create_dockerfile(context,yamlfile,platform,target,execute): +def create_dockerfile(context,experiment,yamlfile,platform,target,execute): """ - Write the dockerfile """ context.forward(dockerfile_create) From 1b445e949513d71c45ae7abff2567ce10c696646 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 13:47:44 -0400 Subject: [PATCH 05/61] #141 Fix combining and parsing yamls - combine model, compile, and platform yaml - parse combined yaml --- fre/make/gfdlfremake/platformfre.py | 18 ++- fre/make/gfdlfremake/yamlfre.py | 176 +++++++++++++++++++++------- 2 files changed, 139 insertions(+), 55 deletions(-) diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index df37aa56..9bb337ed 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -4,17 +4,12 @@ class platforms (): ## \param self The platform yaml object ## \param fname The path to the platform yaml file ## \param v the fre variables defined in the model Yaml - def __init__(self,fname,v): - with open(fname, 'r') as file: - self.yaml = yaml.safe_load(v.freVarSub(file.read())) + def __init__(self,yamlFile): #,v): + self.yaml = yamlFile + ## Check the yaml for errors/omissions - try: - self.yaml["platforms"] - except: - print(fname+" must have a platforms key\n") - raise ## Loop through the platforms - for p in self.yaml["platforms"]: + for p in self.yaml: ## Check the platform name try: p["name"] @@ -90,9 +85,10 @@ def __init__(self,fname,v): p["mkTemplate"] except: raise ValueError("The non-container platform "+p["name"]+" must specify a mkTemplate \n") + ## \brief Checks if the platform yaml has the named platform def hasPlatform(self,name): - for p in self.yaml["platforms"]: + for p in self.yaml: if p["name"] == name: return True return False @@ -101,6 +97,6 @@ def getPlatformsYaml(self): return self.yaml ## \brief Get the platform information from the name of the platform def getPlatformFromName(self,name): - for p in self.yaml["platforms"]: + for p in self.yaml: if p["name"] == name: return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"]) diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 52adcdcb..0c097980 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -1,41 +1,120 @@ +import os +from pathlib import Path +import shutil import yaml import json from jsonschema import validate, 
ValidationError, SchemaError from . import platformfre +###################################### +##COULD HAVE YAML_COMBINE AS A SEPARATE TOOL FOR COMPILATION AND PP YAMLS, THEN YAMLFRE.PY WOULD JUST CLEAN AND GET COMPILE/PLATFORM INFO IT NEEDS (AND DO CHECKS/VALIDAITON) +###################################### +def join_constructor(loader, node): + """ + Allows FRE properties defined + in main yaml to be concatenated. + """ + seq = loader.construct_sequence(node) + return ''.join([str(i) for i in seq]) + +## Combine model yaml, compile yaml, and platform yaml into one yaml file +## \param self the compile Yaml object +def combineYaml(self): + # name of combined yaml + comb_compileyaml=f"fullcompile_{self.p}-{self.t}.yaml" + + # copy model yaml info into combined yaml + with open(comb_compileyaml,'w+') as f1: + f1.write(f'name: &name "{self.n}"\n') + f1.write(f'platform: &platform "{self.p}"\n') + f1.write(f'target: &target "{self.t}"\n\n') + with open(self.modelfile,'r') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + + # open combined compile yaml + with open(comb_compileyaml,'r') as f3: + cy = yaml.load(f3,Loader=yaml.Loader) + + # Check if exp name given is actually valid experiment listed in combined yaml + exp_list = [] + for i in cy.get("experiments"): + exp_list.append(i.get("name")) + + if self.n not in exp_list: + raise Exception(f"{self.n} is not in the list of experiments") + + # Extract compile yaml path for exp. provided + # if experiment matches name in list of experiments in yaml, extract file path + for i in cy.get("experiments"): + if self.n == i.get("name"): + compileyaml=i.get("compile") + + # set platform yaml + platformyaml = cy.get("shared").get("compile").get("platformYaml") + + # copy compile and platform yaml info into combined yaml + if compileyaml is not None: + with open(comb_compileyaml,'a') as f1: + for i in compileyaml: + with open(i,'r') as f2: + f1.write("\n### COMPILE INFO ###\n") + shutil.copyfileobj(f2,f1) + + # combine platform yaml + if platformyaml is not None: + with open(platformyaml,'r') as f22: + f1.write("\n### PLATFORM INFO ###\n") + shutil.copyfileobj(f22,f1) + + + # open combined compile yaml + full_cy = parseCompile(comb_compileyaml) + + # Clean combined yaml to validate + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_cy.keys(): + del full_cy[kc] + with open(comb_compileyaml,'w') as f: + yaml.safe_dump(full_cy,f,sort_keys=False) + + return full_cy + ## Open the yaml file and parse as fremakeYaml ## \param fname the name of the yaml file to parse -## \param v the FRE yaml varaibles (FRE properties) -def parseCompile(fname,v): -## Open the yaml file and parse as fremakeYaml - with open(fname, 'r') as file: - # Substitute the variables and read the updated yaml string - y = yaml.safe_load(v.freVarSub(file.read())) +def parseCompile(fname): +# Open the yaml file and parse as fremakeYaml + with open(fname, 'r') as yamlfile: + y = yaml.load(yamlfile,Loader=yaml.Loader) + return y + ## \brief Checks the yaml for variables. Required variables will dump and error. 
Non-required variables will ## set a default value for the variable ## \param var A variable in the yaml ## \param val a default value for var ## \param req if true, the variable is required in the yaml and an exception will be raised ## \param err An error message to print if the variable is required and doesn't exist -def yamlVarCheck(var,val="",req=False,err="error"): - try: - var - except: - if req: - print (err) - raise - else: - var = val +#def yamlVarCheck(var,val="",req=False,err="error"): +# try: +# var +# except: +# if req: +# print (err) +# raise +# else: +# var = val + ## This will read the compile yaml for FRE and then fill in any of the missing non-required variables class compileYaml(): ## Read get the compile yaml and fill in the missing pieces ## \param self the compile Yaml object ## \yamlFile The path to the compile yaml file -## \v The FRE variables set in the model yaml - def __init__(self,yamlFile,v): - self.file = yamlFile - self.yaml = parseCompile(self.file,v) + def __init__(self,yamlFile): + self.yaml = yamlFile + ## Check the yaml for required things ## Check for required experiment name try: @@ -53,13 +132,11 @@ def __init__(self,yamlFile,v): self.yaml["baremetal_linkerflags"] except: self.yaml["baremetal_linkerflags"]="" -# ## Set up the srcDir -# self.src = modelRoot + "/" + self.yaml["experiment"] + "/src" ## Check for required src try: self.yaml["src"] except: - print("You must set a src to specify the sources in "+self.yaml["experiment"]+"\n") + print("You must set a src to specify the sources in modelRoot/"+self.yaml["experiment"]+"\n") raise ## Loop through the src array for c in self.yaml['src']: @@ -129,38 +206,49 @@ def getCompileYaml(self): ## \description This will take the models yaml file which has a list of the sub yaml files and combine them into the ## full freyaml that can be used and checked # platformYaml: platforms.yaml -# layoutYaml: # compileYaml: compile.yaml -# experiments: class freyaml(): ## \param self The freyaml object ## \param modelFileName The name of the model yaml file -## \param v the FRE yaml varaibles object (FRE properties) - def __init__(self,modelFileName,v): - self.freyaml = {} - self.modelfile = modelFileName - with open(self.modelfile, 'r') as file: - self.modelyaml = yaml.safe_load(v.freVarSub(file.read())) - self.freyaml.update(self.modelyaml) - self.compilefile = self.modelyaml["compileYaml"] - self.compile = compileYaml(self.compilefile,v) - self.compileyaml = self.compile.getCompileYaml() - self.freyaml.update(self.compileyaml) - self.platformsfile = self.modelyaml["platformYaml"] - self.platforms = platformfre.platforms(self.platformsfile,v) - self.platformsyaml = self.platforms.getPlatformsYaml() - self.freyaml.update(self.platformsyaml) +## \param name experiment name +## \param platform platform used for compilation +## \param target target used in compilation + def __init__(self,modelFileName,name,platform,target): + #self.freyaml = {} + self.modelfile = Path(modelFileName) + self.n = name + self.p = platform + self.t = target + + yaml.add_constructor('!join', join_constructor) + + #parse + self.combined = combineYaml(self) + ## Validate the YAML fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) schema_path = os.path.join(fremake_package_dir, 'schema.json') with open(schema_path, 'r') as f: s = f.read() schema = json.loads(s) - validate(instance=self.freyaml, schema=schema) -## \return platform yaml dictionary - def getPlatformsYaml(self): - return self.platformsyaml -## \return compile yaml 
dictionary + validate(instance=self.combined,schema=schema) + print("\nCOMBINED YAML VALID") + + #get compile info + self.compiledict = self.combined.get("compile") + self.compile = compileYaml(self.compiledict) + self.compileyaml = self.compile.getCompileYaml() + + #get platform info + self.platformsdict = self.combined.get("platforms") + self.platforms = platformfre.platforms(self.platformsdict) + self.platformsyaml = self.platforms.getPlatformsYaml() + +## Returns the compile yaml def getCompileYaml(self): return self.compileyaml + +## Returns the compile yaml + def getPlatformsYaml(self): + return self.platformsyaml From eb67d23c9b2187954d4f546a1ef0c651da3fdf35 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 14 Aug 2024 13:52:33 -0400 Subject: [PATCH 06/61] #141 Rework code that saves parsed yaml info - added `-e` experiment and `--execute` options - redefine variables with parsed information - createCheckout: fix checkout script running if `--execute` used and checkour script already created --- fre/make/createCheckout.py | 42 ++++++++++++++++++++++++-------------- fre/make/createCompile.py | 23 +++++++++++++-------- fre/make/createDocker.py | 25 +++++++++++++++-------- fre/make/createMakefile.py | 21 +++++++++++++------ fre/make/runFremake.py | 25 ++++++++++++++--------- 5 files changed, 88 insertions(+), 48 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index e86ac006..37d1f03b 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -3,15 +3,15 @@ from .gfdlfremake import varsfre, platformfre, yamlfre, checkout, targetfre import click import os +import subprocess import logging import sys @click.command() -def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): +def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,jobs,execute,verbose): # Define variables yml = yamlfile - ps = platform - ts = target + name = experiment run = execute jobs = str(jobs) pcheck = no_parallel_checkout @@ -34,12 +34,11 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v plist = platform tlist = target - ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) - ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) - fremakeYaml = modelYaml.getCompileYaml() + for platformName in plist: + for targetName in tlist: + modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: @@ -53,7 +52,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the source directory for the platform @@ -67,13 +66,26 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v freCheckout = checkout.checkout("checkout.sh",srcDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + print("\nCheckout script created in "+ srcDir + "/checkout.sh 
\n") - # Run the checkout script - if run: - freCheckout.run() + # Run the checkout script + if run == True: + freCheckout.run() + else: + sys.exit() else: - sys.exit() + print("\nCheckout script PREVIOUSLY created in "+ srcDir + "/checkout.sh \n") + if run == True: + os.chmod(srcDir+"/checkout.sh", 0o744) + try: + subprocess.run(args=[srcDir+"/checkout.sh"], check=True) + except: + print("\nThere was an error with the checkout script "+srcDir+"/checkout.sh.", + "\nTry removing test folder: " + modelRoot +"\n") + raise + else: + sys.exit() + else: ## Run the checkout script image="ecpe4s/noaa-intel-prototype:2023.09.25" @@ -82,7 +94,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + click.echo("\nCheckout script created at " + tmpDir + "/checkout.sh" + "\n") if __name__ == "__main__": diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 94f39b82..539fb3d1 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -8,11 +8,10 @@ import sys @click.command() -def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): +def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,verbose): # Define variables yml = yamlfile - ps = platform - ts = target + name = experiment nparallel = parallel jobs = str(jobs) run = execute @@ -30,12 +29,18 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): plist = platform tlist = target - ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) +# ## Get the variables in the model yaml +# freVars = varsfre.frevars(yml) +# +# ## Open the yaml file and parse as fremakeYaml +# modelYaml = yamlfre.freyaml(yml,freVars) +# fremakeYaml = modelYaml.getCompileYaml() ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) - fremakeYaml = modelYaml.getCompileYaml() + for platformName in plist: + for targetName in tlist: + modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: @@ -49,8 +54,10 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Make the bldDir based on the modelRoot, the platform, and the target srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 577ab42f..a65dad2b 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -6,7 +6,7 @@ import sys @click.command() -def dockerfile_create(yamlfile, platform, target, execute): +def dockerfile_create(yamlfile, experiment, platform, target, execute): srcDir="src" checkoutScriptName = "checkout.sh" baremetalRun = False # This is needed if there are no bare metal runs @@ -14,13 +14,20 @@ def dockerfile_create(yamlfile, platform, 
target, execute): plist = platform tlist = target yml = yamlfile + name = experiment run = execute - ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) +# ## Get the variables in the model yaml +# freVars = varsfre.frevars(yml) +# ## Open the yaml file and parse as fremakeYaml +# modelYaml = yamlfre.freyaml(yml,freVars) +# fremakeYaml = modelYaml.getCompileYaml() + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) - fremakeYaml = modelYaml.getCompileYaml() + for platformName in plist: + for targetName in tlist: + modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) + fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] ## Loop through platforms and targets @@ -30,8 +37,8 @@ def dockerfile_create(yamlfile, platform, target, execute): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - + raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target @@ -67,8 +74,8 @@ def dockerfile_create(yamlfile, platform, target, execute): dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") currDir = os.getcwd() - click.echo("\ntmpDir created at " + currDir + "/tmp") - click.echo("Dockerfile created at " + currDir + "\n") + click.echo("\ntmpDir created in " + currDir + "/tmp") + click.echo("Dockerfile created in " + currDir + "Dockerfile\n") if run: dockerBuild.build(containerBuild, containerRun) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 03eaaa01..5395ce32 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -6,7 +6,7 @@ import logging @click.command() -def makefile_create(yamlfile,platform,target): +def makefile_create(yamlfile,experiment,platform,target): srcDir="src" checkoutScriptName = "checkout.sh" baremetalRun = False # This is needed if there are no bare metal runs @@ -14,13 +14,20 @@ def makefile_create(yamlfile,platform,target): plist = platform tlist = target yml = yamlfile + name = experiment + +# ## Get the variables in the model yaml +# freVars = varsfre.frevars(yml) +# ## Open the yaml file and parse as fremakeYaml +# modelYaml = yamlfre.freyaml(yml,freVars) +# fremakeYaml = modelYaml.getCompileYaml() - ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) - fremakeYaml = modelYaml.getCompileYaml() + for platformName in plist: + for targetName in tlist: + modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) + fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] ## Loop through platforms and targets @@ -30,8 +37,10 @@ def makefile_create(yamlfile,platform,target): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Make the bldDir based on the modelRoot, the platform, and the 
target srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 49072b2f..6a98f885 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -15,11 +15,10 @@ @click.command() -def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_parallel_checkout, verbose): yml = yamlfile - ps = platform - ts = target + name = experiment nparallel = parallel jobs = str(jobs) pcheck = no_parallel_checkout @@ -43,12 +42,17 @@ def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout plist = platform tlist = target - ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) - +# ## Get the variables in the model yaml +# freVars = varsfre.frevars(yml) +# +# ## Open the yaml file and parse as fremakeYaml +# modelYaml = yamlfre.freyaml(yml,freVars) +# fremakeYaml = modelYaml.getCompileYaml() ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) - fremakeYaml = modelYaml.getCompileYaml() + for platformName in plist: + for targetName in tlist: + modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: @@ -62,8 +66,9 @@ def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the checkout script if iscontainer == False: From 32ad88b849fd069fd948955fad11be40626327ac Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 26 Aug 2024 16:13:02 -0400 Subject: [PATCH 07/61] #141 Remove combining yamls functionality from `yamlfre.py` - this is done in the separate tool `combine-yamls` --- fre/make/gfdlfremake/yamlfre.py | 111 ++++---------------------------- 1 file changed, 12 insertions(+), 99 deletions(-) diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 0c097980..02b23812 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -6,88 +6,12 @@ from jsonschema import validate, ValidationError, SchemaError from . import platformfre -###################################### -##COULD HAVE YAML_COMBINE AS A SEPARATE TOOL FOR COMPILATION AND PP YAMLS, THEN YAMLFRE.PY WOULD JUST CLEAN AND GET COMPILE/PLATFORM INFO IT NEEDS (AND DO CHECKS/VALIDAITON) -###################################### -def join_constructor(loader, node): - """ - Allows FRE properties defined - in main yaml to be concatenated. 
- """ - seq = loader.construct_sequence(node) - return ''.join([str(i) for i in seq]) - -## Combine model yaml, compile yaml, and platform yaml into one yaml file -## \param self the compile Yaml object -def combineYaml(self): - # name of combined yaml - comb_compileyaml=f"fullcompile_{self.p}-{self.t}.yaml" - - # copy model yaml info into combined yaml - with open(comb_compileyaml,'w+') as f1: - f1.write(f'name: &name "{self.n}"\n') - f1.write(f'platform: &platform "{self.p}"\n') - f1.write(f'target: &target "{self.t}"\n\n') - with open(self.modelfile,'r') as f2: - f1.write("### MODEL YAML SETTINGS ###\n") - shutil.copyfileobj(f2,f1) - - # open combined compile yaml - with open(comb_compileyaml,'r') as f3: - cy = yaml.load(f3,Loader=yaml.Loader) - - # Check if exp name given is actually valid experiment listed in combined yaml - exp_list = [] - for i in cy.get("experiments"): - exp_list.append(i.get("name")) - - if self.n not in exp_list: - raise Exception(f"{self.n} is not in the list of experiments") - - # Extract compile yaml path for exp. provided - # if experiment matches name in list of experiments in yaml, extract file path - for i in cy.get("experiments"): - if self.n == i.get("name"): - compileyaml=i.get("compile") - - # set platform yaml - platformyaml = cy.get("shared").get("compile").get("platformYaml") - - # copy compile and platform yaml info into combined yaml - if compileyaml is not None: - with open(comb_compileyaml,'a') as f1: - for i in compileyaml: - with open(i,'r') as f2: - f1.write("\n### COMPILE INFO ###\n") - shutil.copyfileobj(f2,f1) - - # combine platform yaml - if platformyaml is not None: - with open(platformyaml,'r') as f22: - f1.write("\n### PLATFORM INFO ###\n") - shutil.copyfileobj(f22,f1) - - - # open combined compile yaml - full_cy = parseCompile(comb_compileyaml) - - # Clean combined yaml to validate - # If keys exists, delete: - keys_clean=["fre_properties", "shared", "experiments"] - for kc in keys_clean: - if kc in full_cy.keys(): - del full_cy[kc] - with open(comb_compileyaml,'w') as f: - yaml.safe_dump(full_cy,f,sort_keys=False) - - return full_cy - ## Open the yaml file and parse as fremakeYaml ## \param fname the name of the yaml file to parse def parseCompile(fname): # Open the yaml file and parse as fremakeYaml with open(fname, 'r') as yamlfile: - y = yaml.load(yamlfile,Loader=yaml.Loader) + y = yaml.safe_load(yamlfile) return y @@ -210,30 +134,19 @@ def getCompileYaml(self): class freyaml(): ## \param self The freyaml object -## \param modelFileName The name of the model yaml file -## \param name experiment name -## \param platform platform used for compilation -## \param target target used in compilation - def __init__(self,modelFileName,name,platform,target): - #self.freyaml = {} - self.modelfile = Path(modelFileName) - self.n = name - self.p = platform - self.t = target - - yaml.add_constructor('!join', join_constructor) - - #parse - self.combined = combineYaml(self) +## \param combinedyaml The name of the combined yaml file + def __init__(self,combinedyaml): + # Parse + self.combined = parseCompile(combinedyaml) ## Validate the YAML - fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) - schema_path = os.path.join(fremake_package_dir, 'schema.json') - with open(schema_path, 'r') as f: - s = f.read() - schema = json.loads(s) - validate(instance=self.combined,schema=schema) - print("\nCOMBINED YAML VALID") +# fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) +# schema_path = os.path.join(fremake_package_dir, 
'schema.json') +# with open(schema_path, 'r') as f: +# s = f.read() +# schema = json.loads(s) +# validate(instance=self.combined,schema=schema) +# print("\nCOMBINED YAML VALID") #get compile info self.compiledict = self.combined.get("compile") From a4c8df153e5a37bcdbe92e6cf678819ebb087894 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 26 Aug 2024 16:14:47 -0400 Subject: [PATCH 08/61] #141 Update parsing information from combined yaml --- fre/make/createCheckout.py | 9 +++------ fre/make/createCompile.py | 13 ++----------- fre/make/createDocker.py | 39 ++++++++++++++++---------------------- fre/make/createMakefile.py | 13 ++----------- fre/make/runFremake.py | 12 ++---------- 5 files changed, 25 insertions(+), 61 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index 62e9cba2..89a4f96f 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -35,10 +35,8 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job tlist = target ## Open the yaml file and parse as fremakeYaml - for platformName in plist: - for targetName in tlist: - modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) - fremakeYaml = modelYaml.getCompileYaml() + modelYaml = yamlfre.freyaml(yml) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: @@ -52,7 +50,7 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + raise SystemExit (platformName + " does not exist in platforms.yaml") #modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the source directory for the platform @@ -87,7 +85,6 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job sys.exit() else: - ## Run the checkout script image="ecpe4s/noaa-intel-prototype:2023.09.25" bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index a3addcae..e8cd13d9 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -30,18 +30,9 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver plist = platform tlist = target -# ## Get the variables in the model yaml -# freVars = varsfre.frevars(yml) -# -# ## Open the yaml file and parse as fremakeYaml -# modelYaml = yamlfre.freyaml(yml,freVars) -# fremakeYaml = modelYaml.getCompileYaml() - ## Open the yaml file and parse as fremakeYaml - for platformName in plist: - for targetName in tlist: - modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) - fremakeYaml = modelYaml.getCompileYaml() + modelYaml = yamlfre.freyaml(yml) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 4fb88f8c..355ce17e 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -17,17 +17,9 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): name = experiment run = execute -# ## Get the variables in the model yaml -# freVars = varsfre.frevars(yml) -# ## Open the yaml file and parse as fremakeYaml -# modelYaml = 
yamlfre.freyaml(yml,freVars) -# fremakeYaml = modelYaml.getCompileYaml() - ## Open the yaml file and parse as fremakeYaml - for platformName in plist: - for targetName in tlist: - modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) - fremakeYaml = modelYaml.getCompileYaml() + modelYaml = yamlfre.freyaml(yml) + fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] ## Loop through platforms and targets @@ -49,17 +41,17 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName - freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate, - tmpDir = tmpDir) - - # Loop through components and send the component name and requires for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() +# freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], +# libs = fremakeYaml["container_addlibs"], +# srcDir = srcDir, +# bldDir = bldDir, +# mkTemplatePath = mkTemplate, +# tmpDir = tmpDir) +# +# # Loop through components and send the component name and requires for the Makefile +# for c in fremakeYaml['src']: +# freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) +# freMakefile.writeMakefile() dockerBuild = buildDocker.container(base = image, exp = fremakeYaml["experiment"], @@ -67,7 +59,8 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): RUNenv = RUNenv, target = targetObject) dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") - dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") + dockerBuild.writeDockerfileMakefile(tmpDir+"/Makefile", tmpDir+"/linkline.sh") + for c in fremakeYaml['src']: dockerBuild.writeDockerfileMkmf(c) @@ -75,7 +68,7 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): currDir = os.getcwd() click.echo("\ntmpDir created in " + currDir + "/tmp") - click.echo("Dockerfile created in " + currDir + "Dockerfile\n") + click.echo("Dockerfile created in " + currDir +"\n") if run: dockerBuild.build(containerBuild, containerRun) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 86217679..36f5dd00 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -15,18 +15,9 @@ def makefile_create(yamlfile,experiment,platform,target): yml = yamlfile name = experiment - -# ## Get the variables in the model yaml -# freVars = varsfre.frevars(yml) -# ## Open the yaml file and parse as fremakeYaml -# modelYaml = yamlfre.freyaml(yml,freVars) -# fremakeYaml = modelYaml.getCompileYaml() - ## Open the yaml file and parse as fremakeYaml - for platformName in plist: - for targetName in tlist: - modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) - fremakeYaml = modelYaml.getCompileYaml() + modelYaml = yamlfre.freyaml(yml) + fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] ## Loop through platforms and targets diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 42e44c1b..8ad9d52c 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -42,17 +42,9 @@ def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_paral plist = platform tlist = target -# ## Get the variables in the model yaml -# 
freVars = varsfre.frevars(yml) -# -# ## Open the yaml file and parse as fremakeYaml -# modelYaml = yamlfre.freyaml(yml,freVars) -# fremakeYaml = modelYaml.getCompileYaml() ## Open the yaml file and parse as fremakeYaml - for platformName in plist: - for targetName in tlist: - modelYaml = yamlfre.freyaml(yml,name,platformName,targetName) - fremakeYaml = modelYaml.getCompileYaml() + modelYaml = yamlfre.freyaml(yml) + fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets for targetName in tlist: From 86e5b72f7e4d505d2c4515f88e22e2fd0f89332e Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 26 Aug 2024 16:16:38 -0400 Subject: [PATCH 09/61] #141 Update runscript for container - certain variables were overwritten when bind mode is implemented - these variables are saved in `BACKUP` variables --- fre/make/gfdlfremake/buildDocker.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 92f418b0..65d26e9a 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -117,8 +117,9 @@ def writeDockerfileMkmf(self, c): def writeRunscript(self,RUNenv,containerRun,runOnDisk): #create runscript in tmp - create spack environment, install necessary packages, self.createscript = ["#!/bin/bash \n", + "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY\n", "# Set up spack loads\n", - RUNenv[0]+"\n"] + RUNenv[0]+"\n"] with open(runOnDisk,"w") as f: f.writelines(self.createscript) f.write("# Load spack packages\n") @@ -130,6 +131,7 @@ def writeRunscript(self,RUNenv,containerRun,runOnDisk): self.spackloads = "spack load "+l+"\n" f.write(self.spackloads) + f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n") f.write("# Run executable\n") f.write(self.bld+"/"+self.e+".x\n") #copy runscript into container in dockerfile From 975094b9bdde40913b9e283ed5731718963a0b97 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 26 Aug 2024 16:23:20 -0400 Subject: [PATCH 10/61] #141 Update platforms yaml path --- fre/make/tests/AM5_example/am5.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml index bed0b733..db6bcd91 100644 --- a/fre/make/tests/AM5_example/am5.yaml +++ b/fre/make/tests/AM5_example/am5.yaml @@ -42,7 +42,7 @@ shared: # compile information compile: compileYaml: &compile_yaml "compile.yaml" - platformYaml: "platforms.yaml" + platformYaml: "yaml_include/platforms.yaml" # directories shared across tools directories: &shared_directories From b612f8084c6b9fa541dc21e2f05e643f04cf8983 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 26 Aug 2024 16:23:58 -0400 Subject: [PATCH 11/61] #141 Add test yaml files --- .../{ => yaml_include}/platforms.yaml | 4 +- .../AM5_example/yaml_include/pp.c96_amip.yaml | 88 +++++++++++++++++++ 2 files changed, 90 insertions(+), 2 deletions(-) rename fre/make/tests/AM5_example/{ => yaml_include}/platforms.yaml (82%) create mode 100644 fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml diff --git a/fre/make/tests/AM5_example/platforms.yaml b/fre/make/tests/AM5_example/yaml_include/platforms.yaml similarity index 82% rename from fre/make/tests/AM5_example/platforms.yaml rename to fre/make/tests/AM5_example/yaml_include/platforms.yaml index 14d4dfff..7e1b9f49 100644 --- a/fre/make/tests/AM5_example/platforms.yaml +++ b/fre/make/tests/AM5_example/yaml_include/platforms.yaml @@ -5,7 +5,7 @@ platforms: modules: [ !join [*INTEL, 
"/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL,".mk"] + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" modelRoot: ${HOME}/fremake_canopy/test - name: ncrc5.intel23 compiler: intel @@ -13,7 +13,7 @@ platforms: modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel diff --git a/fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml b/fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml new file mode 100644 index 00000000..117c66c6 --- /dev/null +++ b/fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml @@ -0,0 +1,88 @@ +# local reusable variable overrides +fre_properties: + - &custom_interp "180,360" + +# directory overrides +#c96_amip_directories: +directories: + <<: *shared_directories + ptmp_dir: "/ptmp/$USER" + pp_grid_spec: *GRID_SPEC96 + +#c96_amip_postprocess: +postprocess: + # pp setting overrides + settings: + <<: *shared_settings + pp_start: *ANA_AMIP_START + pp_stop: *ANA_AMIP_END + pp_chunk_a: *PP_AMIP_CHUNK96 + pp_components: "atmos atmos_scalar" + switches: + <<: *shared_switches + do_statics: False + + # main pp instructions + components: + - type: "atmos_cmip" + sources: "atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip" + sourceGrid: "cubedsphere" + xyInterp: *custom_interp + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos" + sources: "atmos_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_level_cmip" + sources: "atmos_level_cmip" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_level" + sources: "atmos_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_month_aer" + sources: "atmos_month_aer" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'atmos' + - type: "atmos_diurnal" + sources: "atmos_diurnal" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_scalar" + sources: "atmos_scalar" + - type: "aerosol_cmip" + xyInterp: *PP_XYINTERP96 + sources: "aerosol_month_cmip" + sourceGrid: "cubedsphere" + interpMethod: "conserve_order1" + inputRealm: 'atmos' + - type: "land" + sources: "land_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'land' + - type: "land_cmip" + sources: "land_month_cmip" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'land' + - type: "tracer_level" + sources: "atmos_tracer" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'atmos' From 03db8ba876083b9f78b379049f1a3cb65fb81c72 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 27 Aug 2024 13:39:26 -0400 Subject: [PATCH 12/61] #141 Fix up pylint messages --- fre/make/gfdlfremake/buildBaremetal.py | 199 +++++++++------- fre/make/gfdlfremake/buildDocker.py | 317 
++++++++++++++----------- fre/make/gfdlfremake/checkout.py | 249 ++++++++++--------- fre/make/gfdlfremake/makefilefre.py | 247 ++++++++++--------- fre/make/gfdlfremake/platformfre.py | 181 +++++++------- fre/make/gfdlfremake/targetfre.py | 142 ++++++----- fre/make/gfdlfremake/yamlfre.py | 285 ++++++++++++---------- 7 files changed, 898 insertions(+), 722 deletions(-) diff --git a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py index d3be45f0..49ef532e 100644 --- a/fre/make/gfdlfremake/buildBaremetal.py +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -2,96 +2,127 @@ ## \date 2023 ## \author Tom Robinson ## \email thomas.robinson@noaa.gov -## \description +## \description import subprocess import os -from . import targetfre -## \brief Called for parallel execution purposes. Runs the builds. -## \param fremakeBuildList the fremakeBuild object list passes by pool.map +#from . import targetfre + def fremake_parallel(fremakeBuildList): - fremakeBuildList.run() + """ + Brief: Called for parallel execution purposes. Runs the builds. + Param: + - fremakeBuildList : fremakeBuild object list passes by pool.map + """ + fremakeBuildList.run() class buildBaremetal(): -## \brief Creates the build script to compile the model -## \param self The buildScript object -## \param exp The experiment name -## \param mkTemplatePath The template used by mkmf to compile the model -## \param srcDir The source directory -## \param bldDir The build directory -## \param modules The list of modules to load before compilation -## \param modulesInit A list of commands with new line characters to initialize modules - def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jobs): - self.e = exp - self.t = target.gettargetName() - self.src = srcDir - self.bld = bldDir - self.make = "make --jobs="+str(jobs)+" "+target.getmakeline_add() #make line - self.mkmf = True - self.template = mkTemplatePath - self.modules = "" - for m in modules: - self.modules = self.modules +" "+ m -## Set up the top portion of the compile script - self.setup=[ "#!/bin/sh -fx \n", - "bld_dir="+self.bld+"/ \n", - "src_dir="+self.src+"/ \n", - "mkmf_template="+self.template+" \n"] - if self.modules != "": - self.setup.extend(modulesInit) #extend because this is a list - self.setup.append("module load "+self.modules+" \n") # Append because this is a single string -## Create the build directory - os.system("mkdir -p "+self.bld) -## Create the compile script - self.f=open(self.bld+"/compile.sh","w") - self.f.writelines(self.setup) -## \brief Adds components to the build script -## \param self The build script object -## \param c Component from the compile yaml - def writeBuildComponents(self, c): -# Shorthand for component - comp = c["component"] -# Make the component directory - self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") -# Get the paths needed for compiling - pstring = "" - for paths in c["paths"]: - pstring = pstring+"$src_dir/"+paths+" " -# Run list_paths - self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") - self.f.write(" cd $bld_dir/"+comp+"\n") -# Create the mkmf line - if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - elif c["requires"] == []: # If this lib doesnt have 
any code dependencies (no -o) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: #Has requirements -#Set up the requirements as a string to inclue after the -o - reqstring = "" - for r in c["requires"]: - reqstring = reqstring+"-I$bld_dir/"+r+" " -#Figure out if we need the preprocessor - if c["doF90Cpp"]: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") -## Finishes and writes the build script -## \param self The buildScript object + """ + Brief: Creates the build script to compile the model + Param: + - self : The buildScript object + - exp : The experiment name + - mkTemplatePath : The template used by mkmf to compile the model + - srcDir : The source directory + - bldDir : The build directory + - modules : The list of modules to load before compilation + - modulesInit : A list of commands with new line characters to initialize modules + """ + def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jobs): + """ + Initialize variables and set-up the compile script. + """ + self.e = exp + self.t = target.gettargetName() + self.src = srcDir + self.bld = bldDir + self.make = "make --jobs="+str(jobs)+" "+target.getmakeline_add() #make line + self.mkmf = True + self.template = mkTemplatePath + self.modules = "" + for m in modules: + self.modules = self.modules +" "+ m + + ## Set up the top portion of the compile script + self.setup=[ "#!/bin/sh -fx \n", + "bld_dir="+self.bld+"/ \n", + "src_dir="+self.src+"/ \n", + "mkmf_template="+self.template+" \n"] + if self.modules != "": + self.setup.extend(modulesInit) #extend - this is a list + self.setup.append("module load "+self.modules+" \n") # Append -this is a single string + + ## Create the build directory + os.system("mkdir -p "+self.bld) + + ## Create the compile script + self.f=open(self.bld+"/compile.sh","w") + self.f.writelines(self.setup) + + def writeBuildComponents(self, c): + """ + Brief: Adds components to the build script + Param: + - self : The build script object + - c : Component from the compile yaml + """ + # Shorthand for component + comp = c["component"] + + # Make the component directory + self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") + + # Get the paths needed for compiling + pstring = "" + for paths in c["paths"]: + pstring = pstring+"$src_dir/"+paths+" " + + # Run list_paths + self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") + self.f.write(" cd $bld_dir/"+comp+"\n") + + # Create the mkmf line + # If this lib doesnt have any code dependencies and + # it requires the preprocessor (no -o and yes --use-cpp) + if c["requires"] == [] and c["doF90Cpp"]: + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" 
$bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: #Has requirements + #Set up the requirements as a string to inclue after the -o + reqstring = "" + for r in c["requires"]: + reqstring = reqstring+"-I$bld_dir/"+r+" " + + #Figure out if we need the preprocessor + if c["doF90Cpp"]: + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + ##TODO: add targets input - def writeScript(self): - self.f.write("cd "+self.bld+"\n") - self.f.write(self.make+"\n") - self.f.close() -## Run the build script -## \param self The dockerfile object + def writeScript(self): + """ + Brief: Finishes and writes the build script + Param: + - self : The buildScript object + """ + self.f.write("cd "+self.bld+"\n") + self.f.write(self.make+"\n") + self.f.close() + ## TODO run as a batch job on the login cluster - def run(self): + def run(self): + """ + Brief: Run the build script + Param: + - self : The dockerfile object + """ ###### TODO make the Makefile - os.chmod(self.bld+"/compile.sh", 0o744) - command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] - try: - subprocess.run(args=command, check=True) - except: - print("There was an error running "+self.bld+"/compile.sh") - raise - + os.chmod(self.bld+"/compile.sh", 0o744) + command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] + try: + subprocess.run(args=command, check=True) + except: + print("There was an error running "+self.bld+"/compile.sh") + raise diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 65d26e9a..4eb268b7 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -2,154 +2,191 @@ ## \date 2023 ## \author Tom Robinson ## \email thomas.robinson@noaa.gov -## \description +## \description import os -from . import targetfre +#from . 
import targetfre class container(): -## \brief Opens the Dockerfile for writing -## \param self The dockerfile object -## \param base The docker base image to start from -## \param libs Additional libraries defined by user -## \param exp The experiment name -## \param RUNenv The commands that have to be run at the beginning of a RUN in the dockerfile -## to set up the environment - def __init__(self,base,exp,libs,RUNenv,target): - self.base = base - self.e = exp - self.l = libs - self.src = "/apps/"+self.e+"/src" - self.bld = "/apps/"+self.e+"/exec" - self.mkmf = True - self.target = target - self.template = "/apps/mkmf/templates/hpcme-intel21.mk" - if RUNenv == "": - self.setup = ["RUN \\ \n"] - else: - self.setup = ["RUN "+RUNenv[0]+" \\ \n"] - self.setup - for env in RUNenv[1:]: - self.setup.append(" && "+env+" \\ \n") - if self.l: - for l in self.l: - self.setup.append(" && spack load "+l+" \\ \n") - self.mkmfclone=["RUN cd /apps \\ \n", - " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", - " && cp mkmf/bin/* /usr/local/bin \n"] - self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", - " && src_dir="+self.src+" \\ \n", - " && mkmf_template="+self.template+ " \\ \n"] - self.d=open("Dockerfile","w") - self.d.writelines("FROM "+self.base+" \n") -## \brief writes to the checkout part of the Dockerfile and sets up the compile -## \param self The dockerfile object -## \param cScriptName The name of the checkout script in the container -## \param cOnDisk The relative path to the checkout script on disk - def writeDockerfileCheckout(self, cScriptName, cOnDisk): - self.checkoutPath = "/apps/"+self.e+"/src/"+ cScriptName - self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") - self.d.write("RUN chmod 744 /apps/"+self.e+"/src/checkout.sh \n") - self.d.writelines(self.setup) - self.d.write(" && /apps/"+self.e+"/src/checkout.sh \n") -# Clone mkmf - self.d.writelines(self.mkmfclone) -## Copies the Makefile into the bldDir in the dockerfile -## \param self The dockerfile object -## \param makefileOnDiskPath The path to Makefile on the local disk -## \param linklineonDiskPath The path to the link line script on the local disk - def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): - # Set up the bldDir - # If no additional libraries defined - if self.l == None: - self.bldCreate=["RUN mkdir -p "+self.bld+" \n", - "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n"] - self.d.writelines(self.bldCreate) - # If additional libraries defined - if self.l != None: - self.bldCreate=["RUN mkdir -p "+self.bld+" \n", - "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n", - "RUN chmod +rw "+self.bld+"/Makefile \n", - "COPY "+ linklineonDiskPath +" "+self.bld+"/linkline.sh \n", - "RUN chmod 744 "+self.bld+"/linkline.sh \n"] - self.d.writelines(self.bldCreate) - self.d.writelines(self.setup) - self.d.write(" && "+self.bld+"/linkline.sh \n") + """ + Brief: Opens the Dockerfile for writing + Param: + - self : The dockerfile object + - base : The docker base image to start from + - libs : Additional libraries defined by user + - exp : The experiment name + - RUNenv : The commands that have to be run at + the beginning of a RUN in the dockerfile + to set up the environment + """ + def __init__(self,base,exp,libs,RUNenv,target): + """ + Initialize variables and write to the dockerfile + """ + self.base = base + self.e = exp + self.l = libs + self.src = "/apps/"+self.e+"/src" + self.bld = "/apps/"+self.e+"/exec" + self.mkmf = True + self.target = target + self.template 
= "/apps/mkmf/templates/hpcme-intel21.mk" + if RUNenv == "": + self.setup = ["RUN \\ \n"] + else: + self.setup = ["RUN "+RUNenv[0]+" \\ \n"] + self.setup + for env in RUNenv[1:]: + self.setup.append(" && "+env+" \\ \n") + if self.l: + for l in self.l: + self.setup.append(" && spack load "+l+" \\ \n") + self.mkmfclone=["RUN cd /apps \\ \n", + " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", + " && cp mkmf/bin/* /usr/local/bin \n"] + self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", + " && src_dir="+self.src+" \\ \n", + " && mkmf_template="+self.template+ " \\ \n"] + self.d=open("Dockerfile","w") + self.d.writelines("FROM "+self.base+" \n") -## \brief Adds components to the build part of the Dockerfile -## \param self The dockerfile object -## \param c Component from the compile yaml - def writeDockerfileMkmf(self, c): -# Set up the compile variables - self.d.writelines(self.bldsetup) -# Shorthand for component - comp = c["component"] -# Make the component directory - self.d.write(" && mkdir -p $bld_dir/"+comp+" \\ \n") -# Get the paths needed for compiling - pstring = "" - for paths in c["paths"]: - pstring = pstring+"$src_dir/"+paths+" " -# Run list_paths - self.d.write(" && list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+" \\ \n") - self.d.write(" && cd $bld_dir/"+comp+" \\ \n") -# Create the mkmf line - if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: #Has requirements -#Set up the requirements as a string to inclue after the -o - reqstring = "" - for r in c["requires"]: - reqstring = reqstring+"-I$bld_dir/"+r+" " -#Figure out if we need the preprocessor - if c["doF90Cpp"]: - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + def writeDockerfileCheckout(self, cScriptName, cOnDisk): + """ + Brief: writes to the checkout part of the Dockerfile and sets up the compile + Param: + - self : The dockerfile object + - cScriptName : The name of the checkout script in the container + - cOnDisk : The relative path to the checkout script on disk + """ + self.checkoutPath = "/apps/"+self.e+"/src/"+ cScriptName + self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") + self.d.write("RUN chmod 744 /apps/"+self.e+"/src/checkout.sh \n") + self.d.writelines(self.setup) + self.d.write(" && /apps/"+self.e+"/src/checkout.sh \n") + # Clone mkmf + self.d.writelines(self.mkmfclone) -## \brief Writes a runscript to set up spack loads/environment in order to run the executable in the container; runscript copied into container -## \param self The dockerfile object -## \param RUNEnv The commands that have to be run at the beginning of a RUN in the 
dockerfile -## \param containerRun The container platform used with `exec` to run the container; apptainer or singularity used -## \param runOnDisk The path to the run script on the local disk - def writeRunscript(self,RUNenv,containerRun,runOnDisk): - #create runscript in tmp - create spack environment, install necessary packages, - self.createscript = ["#!/bin/bash \n", - "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY\n", - "# Set up spack loads\n", - RUNenv[0]+"\n"] - with open(runOnDisk,"w") as f: - f.writelines(self.createscript) - f.write("# Load spack packages\n") - for env in RUNenv[1:]: - f.write(env+"\n") + def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): + """ + Brief: Copies the Makefile into the bldDir in the dockerfile + Param: + - self : The dockerfile object + - makefileOnDiskPath : The path to Makefile on the local disk + - linklineonDiskPath : The path to the link line script on the local disk + """ + # Set up the bldDir + # If no additional libraries defined + if self.l == None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n"] + self.d.writelines(self.bldCreate) + # If additional libraries defined + if self.l != None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n", + "RUN chmod +rw "+self.bld+"/Makefile \n", + "COPY "+ linklineonDiskPath +" "+self.bld+"/linkline.sh \n", + "RUN chmod 744 "+self.bld+"/linkline.sh \n"] + self.d.writelines(self.bldCreate) + self.d.writelines(self.setup) + self.d.write(" && "+self.bld+"/linkline.sh \n") - if self.l: - for l in self.l: + def writeDockerfileMkmf(self, c): + """ + Brief: Adds components to the build part of the Dockerfile + Param: + - self : The dockerfile object + - c : Component from the compile yaml + """ + # Set up the compile variables + self.d.writelines(self.bldsetup) + + # Shorthand for component + comp = c["component"] + + # Make the component directory + self.d.write(" && mkdir -p $bld_dir/"+comp+" \\ \n") + + # Get the paths needed for compiling + pstring = "" + for paths in c["paths"]: + pstring = pstring+"$src_dir/"+paths+" " + + # Run list_paths + self.d.write(" && list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+" \\ \n") + self.d.write(" && cd $bld_dir/"+comp+" \\ \n") + + # Create the mkmf line + if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: #Has requirements + #Set up the requirements as a string to inclue after the -o + reqstring = "" + for r in c["requires"]: + reqstring = reqstring+"-I$bld_dir/"+r+" " + + #Figure out if we need the preprocessor + if c["doF90Cpp"]: + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c 
\""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + + def writeRunscript(self,RUNenv,containerRun,runOnDisk): + """ + Brief: Writes a runscript to set up spack loads/environment + in order to run the executable in the container; + runscript copied into container + Param: + - self : The dockerfile object + - RUNEnv : The commands that have to be run at + the beginning of a RUN in the dockerfile + - containerRun : The container platform used with `exec` + to run the container; apptainer + or singularity used + - runOnDisk : The path to the run script on the local disk + """ + #create runscript in tmp - create spack environment, install necessary packages, + self.createscript = ["#!/bin/bash \n", + "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY\n", + "# Set up spack loads\n", + RUNenv[0]+"\n"] + with open(runOnDisk,"w") as f: + f.writelines(self.createscript) + f.write("# Load spack packages\n") + for env in RUNenv[1:]: + f.write(env+"\n") + + if self.l: + for l in self.l: self.spackloads = "spack load "+l+"\n" f.write(self.spackloads) - f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n") - f.write("# Run executable\n") - f.write(self.bld+"/"+self.e+".x\n") - #copy runscript into container in dockerfile - self.d.write("COPY "+runOnDisk+" "+self.bld+"/execrunscript.sh\n") - #make runscript executable - self.d.write("RUN chmod 744 "+self.bld+"/execrunscript.sh\n") - #finish the dockerfile - self.d.writelines(self.setup) - self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") - self.d.write('ENTRYPOINT ["/bin/bash"]') - self.d.close() + f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n") + f.write("# Run executable\n") + f.write(self.bld+"/"+self.e+".x\n") + #copy runscript into container in dockerfile + self.d.write("COPY "+runOnDisk+" "+self.bld+"/execrunscript.sh\n") + #make runscript executable + self.d.write("RUN chmod 744 "+self.bld+"/execrunscript.sh\n") + #finish the dockerfile + self.d.writelines(self.setup) + self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") + self.d.write('ENTRYPOINT ["/bin/bash"]') + self.d.close() -## Builds the container image for the model -## \param self The dockerfile object -## \param containerBuild The tool used to build the container; docker or podman used -## \param containerRun The container platform used with `exec` to run the container; apptainer or singularity used - def build(self,containerBuild,containerRun): - os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) - os.system("rm -f "+self.e+".tar "+self.e+".sif") - os.system(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) - os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") + def build(self,containerBuild,containerRun): + """ + Brief: Builds the container image for the model + Param: + - self : The dockerfile object + - containerBuild : The tool used to build the container; + docker or podman used + - containerRun : The container platform used with `exec` to + run the container; apptainer or singularity used + """ + os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) + os.system("rm -f "+self.e+".tar "+self.e+".sif") + os.system(containerBuild+" save -o 
"+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) + os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") diff --git a/fre/make/gfdlfremake/checkout.py b/fre/make/gfdlfremake/checkout.py index 07e13730..3a1ffa9b 100644 --- a/fre/make/gfdlfremake/checkout.py +++ b/fre/make/gfdlfremake/checkout.py @@ -2,119 +2,154 @@ import subprocess ## TODO: Add parallelizations using () and simplify -## Creates the clone lines for the checkout script -## \param file Checkout script file -## \param repo the repo(s) to clone -## \param component Model component name -## \param srcDir The source directory -## \param branch The version to clone/checkout -## \param add Additional instrcutions after the clone -## \param multi True if a component has more than one repo to clone def writeRepo(file,repo,component,srcDir,branch,add,multi,jobs,pc): -## Write message about cloning repo and branch in component - file.write("echo cloning "+repo+" -b "+branch+" into "+srcDir+"/"+component+"\n") -## If this component has multiple repos, clone everything in the component folder -## If it's not multi, then use the component name (comp) as the folder name to clone into - if multi: - file.write("mkdir -p "+component+"\n") - file.write("cd "+component+"\n") - comp="" - else: - comp=component + """ + Brief: Creates the clone lines for the checkout script + Param: + - file Checkout script file + - repo the repo(s) to clone + - component Model component name + - srcDir The source directory + - branch The version to clone/checkout + - add Additional instrcutions after the clone + - multi True if a component has more than one repo to clone + """ + ## Write message about cloning repo and branch in component + file.write("echo cloning "+repo+" -b "+branch+" into "+srcDir+"/"+component+"\n") -## Check if there is a branch/version and then write the clone line; record the pid of that clone in dictionary `pids` if parallel checkout option is defined - if pc: - if branch=="": - file.write("(git clone --recursive --jobs="+jobs+" "+repo+" "+comp+")"+pc+"\n") - if multi: - r=repo.split("/")[4].strip(".git") - file.write("pids+=("+r+"pid:$!)\n") - else: - file.write("pids+=("+comp+"pid:$!)\n") - else: - file.write("(git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+")"+pc+"\n") - if multi: - r=repo.split("/")[4].strip(".git") - file.write("pids+=("+r+"pid:$!)\n") - else: - file.write("pids+=("+comp+"pid:$!)\n") - else: - if branch=="": - file.write("git clone --recursive --jobs="+jobs+" "+repo+" "+comp+"\n") - else: - file.write("git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+"\n") + ## If this component has multiple repos, clone everything in the component folder + ## If it's not multi, then use the component name (comp) as the folder name to clone into + if multi: + file.write("mkdir -p "+component+"\n") + file.write("cd "+component+"\n") + comp="" + else: + comp=component + + ## Check if there is a branch/version and then write the clone line; + ## record the pid of that clone in dictionary `pids` if parallel + ## checkout option is defined + if pc: + if branch=="": + file.write("(git clone --recursive --jobs="+jobs+" "+repo+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + file.write("(git clone --recursive --jobs="+jobs+" 
"+repo+" -b "+branch+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + if branch=="": + file.write("git clone --recursive --jobs="+jobs+" "+repo+" "+comp+"\n") + else: + file.write("git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+"\n") + + ## Make sure to go back up in the folder structure + if multi: + file.write("cd .. \n") + if add!="": + file.write(add) -## Make sure to go back up in the folder structure - if multi: - file.write("cd .. \n") - if add!="": - file.write(add) - -## Class to create the checkout script class checkout(): -## \brief Opens the checkout script with the specified name -## \param self The checkout script object -## \param fname The file name of the checkout script -## \param srcDir The source directory where fname will be run and source will exist - def __init__(self,fname,srcDir): - self.fname = fname - self.src = srcDir - os.system("mkdir -p "+self.src) -##TODO: Force checkout - os.system("rm -rf "+self.src+"/*") - self.checkoutScript = open(self.src+"/"+fname, 'w') - self.checkoutScript.write("#!/bin/sh -f \n") - self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") -## \brief Writes the contents of the checkout script by looping through the input yaml -## \param self The checkout script object -## \param y The fremake compile yaml - def writeCheckout(self,y,jobs,pc): - self.checkoutScript.write("cd "+self.src +"\n") - for c in y['src']: - if type(c['repo']) is list and type(c['branch']) is list: - for (repo,branch) in zip(c['repo'],c['branch']): - writeRepo(self.checkoutScript,repo,c['component'],self.src,branch,c['additionalInstructions'],True,jobs,pc) - else: - writeRepo(self.checkoutScript,c['repo'],c['component'],self.src,c['branch'],c['additionalInstructions'],False,jobs,pc) -## \brief If pc is defined: Loops through dictionary of pids, waits for each pid individually, writes exit code in `check` list; allows checkoutscript to exit if exit code is not 0; closes the checkout script when writing is done -## \param self The checkout script object - def finish (self,pc): - if pc: - self.checkoutScript.write('for id in ${pids[@]}; do\n wait ${id##*:}\n check+=("clone of ${id%%:*} exited with status $?")\ndone\n') - self.checkoutScript.write('for stat in "${check[@]}"; do\n echo $stat \n if [ ${stat##* } -ne 0 ]; then\n exit ${stat##* }\n fi\ndone') - self.checkoutScript.close() - else: - self.checkoutScript.close() -## \brief Changes the permission on the checkout script and runs it -## \param self The checkout script object + """ + Brief: Class to create the checkout script + """ + def __init__(self,fname,srcDir): + """ + Brief: Opens the checkout script with the specified name + Param: + - self The checkout script object + - fname The file name of the checkout script + - srcDir The source directory where fname will be run and source will exist + """ + self.fname = fname + self.src = srcDir + os.system("mkdir -p "+self.src) + ##TODO: Force checkout + os.system("rm -rf "+self.src+"/*") + self.checkoutScript = open(self.src+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -f \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + def writeCheckout(self,y,jobs,pc): + """ + Brief: Writes the contents of the checkout script by looping through the input yaml + Param: + - self The checkout script object + - y The fremake compile yaml + """ + self.checkoutScript.write("cd "+self.src +"\n") + for c in 
y['src']: + if type(c['repo']) is list and type(c['branch']) is list: + for (repo,branch) in zip(c['repo'],c['branch']): + writeRepo(self.checkoutScript,repo,c['component'],self.src,branch,c['additionalInstructions'],True,jobs,pc) + else: + writeRepo(self.checkoutScript,c['repo'],c['component'],self.src,c['branch'],c['additionalInstructions'],False,jobs,pc) + + def finish (self,pc): + """ + Brief: If pc is defined: Loops through dictionary of pids, + waits for each pid individually, writes exit code in + `check` list; + allows checkoutscript to exit if exit code is not 0; + closes the checkout script when writing is done + Param: + - self The checkout script object + """ + if pc: + self.checkoutScript.write('for id in ${pids[@]}; do\n wait ${id##*:}\n check+=("clone of ${id%%:*} exited with status $?")\ndone\n') + self.checkoutScript.write('for stat in "${check[@]}"; do\n echo $stat \n if [ ${stat##* } -ne 0 ]; then\n exit ${stat##* }\n fi\ndone') + self.checkoutScript.close() + else: + self.checkoutScript.close() + ## TODO: batch script building - def run (self): - os.chmod(self.src+"/"+self.fname, 0o744) - try: - subprocess.run(args=[self.src+"/"+self.fname], check=True) - except: - print("There was an error with the checkout script "+self.src+"/"+self.fname) - raise + def run (self): + """ + Brief: Changes the permission on the checkout script and runs it + Param: + - self The checkout script object + """ + os.chmod(self.src+"/"+self.fname, 0o744) + try: + subprocess.run(args=[self.src+"/"+self.fname], check=True) + except: + print("There was an error with the checkout script "+self.src+"/"+self.fname) + raise ################################################################################################### ## Subclass for container checkout class checkoutForContainer(checkout): -## \brief Opens the checkout script with the specified name -## \param self The checkout script object -## \param fname The file name of the checkout script -## \param srcDir The source directory where fname will be run and source will exist -## \param tmpdir The relative path on disk that fname will be created (and copied from into the -## container) - def __init__(self,fname,srcDir,tmpdir): - self.fname = fname - self.src = srcDir - self.tmpdir = tmpdir - os.system("mkdir -p "+self.tmpdir) - os.system("rm -rf "+self.tmpdir+"/*") - self.checkoutScript = open(self.tmpdir+"/"+fname, 'w') - self.checkoutScript.write("#!/bin/sh -fx \n") - self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + """ + Brief: Subclass for container checkout + """ + def __init__(self,fname,srcDir,tmpdir): + """ + Brief: Opens the checkout script with the specified name + Param: + - self : The checkout script object + - fname : The file name of the checkout script + - srcDir : The source directory where fname will be run and source will exist + - tmpdir : The relative path on disk that fname will be created (and copied from into the + container) + """ + self.fname = fname + self.src = srcDir + self.tmpdir = tmpdir + os.system("mkdir -p "+self.tmpdir) + os.system("rm -rf "+self.tmpdir+"/*") + self.checkoutScript = open(self.tmpdir+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -fx \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") -## \brief Removes the self.tmpdir and contents -## \param self The checkout script object - def cleanup (self): - os.system("rm -rf "+self.tmpdir) + def cleanup (self): + """ + Brief: Removes the self.tmpdir and contents + Param: + - self The checkout script object + """ + 
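Similarly, the checkout class writes a shell script rather than cloning directly. With the parallel-checkout option each clone is backgrounded, its PID is recorded, and finish() appends the wait and exit-status checks, so the generated script looks roughly like this; the repo URL, branch, component name, and job count are invented, and the parallel option is assumed to be a trailing "&".

#!/bin/sh -f
export GIT_TERMINAL_PROMPT=0
cd /path/to/src
echo cloning https://github.com/EXAMPLE/FMS.git -b 2024.01 into /path/to/src/FMS
(git clone --recursive --jobs=4 https://github.com/EXAMPLE/FMS.git -b 2024.01 FMS) &
pids+=(FMSpid:$!)
for id in ${pids[@]}; do
  wait ${id##*:}
  check+=("clone of ${id%%:*} exited with status $?")
done
for stat in "${check[@]}"; do
  echo $stat
  if [ ${stat##* } -ne 0 ]; then
    exit ${stat##* }
  fi
done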
os.system("rm -rf "+self.tmpdir) diff --git a/fre/make/gfdlfremake/makefilefre.py b/fre/make/gfdlfremake/makefilefre.py index 8cc6b6ea..0a6196d7 100644 --- a/fre/make/gfdlfremake/makefilefre.py +++ b/fre/make/gfdlfremake/makefilefre.py @@ -1,11 +1,15 @@ import os -import subprocess +#import subprocess import textwrap -## \brief Writes the link line for bare metal and container builds -## \param self The Makefile object def linklineBuild(self): + """ + Brief: Writes the link line for bare metal and container builds + Param: + - self The Makefile object + """ linkline="" + #if additional libraries are defined, populate the link line with the correct information for libraries ## CONTAINER; write a script that will execute in the container, to fill in link line with additional libraries in Makefile if "tmp" in self.filePath: @@ -14,7 +18,7 @@ def linklineBuild(self): for l in self.l: fh.write(l+" ") fh.write("\n") - + self.linklinecreate = ''' line='' for l in $@; do @@ -54,97 +58,115 @@ def linklineBuild(self): os.system(f"sed -i 's|\($(LDFLAGS)\)|$(LL) \\1|' {self.filePath}/Makefile") class makefile(): -## \brief Opens Makefile and sets the experiment and other common variables -## \param self The Makefile object -## \param exp Experiment name -## \param libs Additional libraries/linker flags defined by user -## \param srcDir The path to the source directory -## \param bldDir The path to the build directory -## \param mkTemplatePath The path of the template .mk file for compiling - def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath): - self.e = exp - self.l = libs - self.src = srcDir - self.bld = bldDir - self.template = mkTemplatePath - self.c =[] #components - self.r=[] #requires - self.o=[] #overrides - os.system("mkdir -p "+self.bld) - self.filePath = self.bld # Needed so that the container and bare metal builds can - # use the same function to create the Makefile - -## \brief Adds a component and corresponding requires to the list -## \param self The Makefile object -## \param c The component -## \param r The requires for that componenet -## \param o The overrides for that component - def addComponent (self,c,r,o): - self.c.append(c) - self.r.append(r) - self.o.append(o) -## \brief Sorts the component by how many requires there are for that component -## \param self The Makefile object -## \param c The component -## \param r The requires for that component -## \param o The overrides for that component - def createLibstring (self,c,r,o): - d=zip(self.c,self.r,self.o) - return(sorted(d,key=lambda values:len(values[1]),reverse=True)) - -## \brief Writes the Makefile. 
Should be called after all components are added -## \param self The Makefile object - def writeMakefile (self): -# Get the list of all of the libraries - sd=self.createLibstring(self.c,self.r,self.o) - libstring=" " - for i in sd: - lib=i[0] - libstring = libstring+lib+"/lib"+lib+".a " -# Open the Makefile for Writing - with open(self.filePath+"/Makefile","w") as fh: -# Write the header information for the Makefile - fh.write("# Makefile for "+self.e+"\n") - fh.write("SRCROOT = "+self.src+"/\n") - fh.write("BUILDROOT = "+self.bld+"/\n") - fh.write("MK_TEMPLATE = "+self.template+"\n") - fh.write("include $(MK_TEMPLATE)"+"\n") -# Write the main experiment compile - fh.write(self.e+".x: "+libstring+"\n") - fh.write("\t$(LD) $^ $(LDFLAGS) -o $@ $(STATIC_LIBS)"+"\n") - -# Write the link line script with user-provided libraries - if self.l: - linklineBuild(self) - -# Write the individual component library compiles - with open(self.filePath+"/Makefile","a") as fh: - for (c,r,o) in sd: - libstring = " " - for lib in r: - libstring = libstring+lib+"/lib"+lib+".a " - cstring = c+"/lib"+c+".a: " - fh.write(cstring+libstring+" FORCE"+"\n") - if o == "": - fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) --directory="+c+" $(@F)\n") - else: - fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) "+o+" --directory="+c+" $(@F)\n") - fh.write("FORCE:\n") - fh.write("\n") -# Set up the clean - fh.write("clean:\n") - for c in self.c: - fh.write("\t$(MAKE) --directory="+c+" clean\n") -# Set up localize - fh.write("localize:\n") - for c in self.c: - fh.write("\t$(MAKE) -f $(BUILDROOT)"+c+" localize\n") -# Set up distclean - fh.write("distclean:\n") - for c in self.c: - fh.write("\t$(RM) -r "+c+"\n") - fh.write("\t$(RM) -r "+self.e+"\n") - fh.write("\t$(RM) -r Makefile \n") + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath): + """ + Brief: Opens Makefile and sets the experiment and other common variables + Param: + - self The Makefile object + - exp Experiment name + - libs Additional libraries/linker flags defined by user + - srcDir The path to the source directory + - bldDir The path to the build directory + - mkTemplatePath The path of the template .mk file for compiling + """ + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.bld) + self.filePath = self.bld # Needed so that the container and bare metal builds can + # use the same function to create the Makefile + + def addComponent (self,c,r,o): + """ + Brief: Adds a component and corresponding requires to the list + Param: + - self The Makefile object + - c The component + - r The requires for that componenet + - o The overrides for that component + """ + self.c.append(c) + self.r.append(r) + self.o.append(o) + + def createLibstring (self,c,r,o): + """ + Brief: Sorts the component by how many requires there are for that component + Param: + - self The Makefile object + - c The component + - r The requires for that component + - o The overrides for that component + """ + d=zip(self.c,self.r,self.o) + return(sorted(d,key=lambda values:len(values[1]),reverse=True)) + + def writeMakefile (self): + """ + Brief: Writes the Makefile. 
Should be called after all components are added + Param: + - self The Makefile object + """ + # Get the list of all of the libraries + sd=self.createLibstring(self.c,self.r,self.o) + libstring=" " + for i in sd: + lib=i[0] + libstring = libstring+lib+"/lib"+lib+".a " + + # Open the Makefile for Writing + with open(self.filePath+"/Makefile","w") as fh: + # Write the header information for the Makefile + fh.write("# Makefile for "+self.e+"\n") + fh.write("SRCROOT = "+self.src+"/\n") + fh.write("BUILDROOT = "+self.bld+"/\n") + fh.write("MK_TEMPLATE = "+self.template+"\n") + fh.write("include $(MK_TEMPLATE)"+"\n") + + # Write the main experiment compile + fh.write(self.e+".x: "+libstring+"\n") + fh.write("\t$(LD) $^ $(LDFLAGS) -o $@ $(STATIC_LIBS)"+"\n") + + # Write the link line script with user-provided libraries + if self.l: + linklineBuild(self) + + # Write the individual component library compiles + with open(self.filePath+"/Makefile","a") as fh: + for (c,r,o) in sd: + libstring = " " + for lib in r: + libstring = libstring+lib+"/lib"+lib+".a " + cstring = c+"/lib"+c+".a: " + fh.write(cstring+libstring+" FORCE"+"\n") + if o == "": + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) --directory="+c+" $(@F)\n") + else: + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) "+o+" --directory="+c+" $(@F)\n") + fh.write("FORCE:\n") + fh.write("\n") + + # Set up the clean + fh.write("clean:\n") + for c in self.c: + fh.write("\t$(MAKE) --directory="+c+" clean\n") + + # Set up localize + fh.write("localize:\n") + for c in self.c: + fh.write("\t$(MAKE) -f $(BUILDROOT)"+c+" localize\n") + + # Set up distclean + fh.write("distclean:\n") + for c in self.c: + fh.write("\t$(RM) -r "+c+"\n") + fh.write("\t$(RM) -r "+self.e+"\n") + fh.write("\t$(RM) -r Makefile \n") ### This seems incomplete? ~ ejs ## The makefile class for a container. 
It gets built into a temporary directory so it can be copied @@ -156,21 +178,24 @@ def writeMakefile (self): ## \param mkTemplatePath The path of the template .mk file for compiling ## \param tmpDir A local path to temporarily store files build to be copied to the container class makefileContainer(makefile): - def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath,tmpDir): - self.e = exp - self.l = libs - self.src = srcDir - self.bld = bldDir - self.template = mkTemplatePath - self.tmpDir = tmpDir - self.c =[] #components - self.r=[] #requires - self.o=[] #overrides - os.system("mkdir -p "+self.tmpDir) - self.filePath = self.tmpDir # Needed so that the container and bare metal builds can + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath,tmpDir): + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.tmpDir = tmpDir + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.tmpDir) + self.filePath = self.tmpDir # Needed so that the container and bare metal builds can # use the same function to create the Makefile -## \return the tmpDir -## \param self The makefile object - def getTmpDir(self): - return self.tmpDir + def getTmpDir(self): + """ + Brief: Return the tmpDir + Param: + - self The makefile object + """ + return self.tmpDir diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index 9bb337ed..ee6d0e2c 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -1,102 +1,113 @@ import yaml class platforms (): -## \param self The platform yaml object -## \param fname The path to the platform yaml file -## \param v the fre variables defined in the model Yaml - def __init__(self,yamlFile): #,v): - self.yaml = yamlFile + def __init__(self,fname): + """ + Param: + - self The platform yaml object + - fname The path to the platform yaml file + """ + self.yaml = fname -## Check the yaml for errors/omissions -## Loop through the platforms - for p in self.yaml: -## Check the platform name - try: - p["name"] - except: - print("At least one of the platforms is missing a name in "+fname+"\n") - raise -## Check the compiler - try: - p["compiler"] - except: - print ("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") - raise -## Check for the Fortran (fc) and C (cc) compilers - try: - p["fc"] - except: - print ("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") - raise - try: - p["cc"] - except: - print ("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") - raise -## Check for modules to load - try: - p["modules"] - except: - p["modules"]=[""] -## Check for modulesInit to set up the modules environment - try: - p["modulesInit"] - except: - p["modulesInit"]=[""] -## Get the root for the build - try: - p["modelRoot"] - except: - p["modelRoot"] = "/apps" -## Check if we are working with a container and get the info for that - try: - p["container"] - except: - p["container"] = False - p["RUNenv"] = "" - p["containerBuild"] = "" - p["containerRun"] = "" - if p["container"]: -## Check the container builder - try: + ## Check the yaml for errors/omissions + ## Loop through the platforms + for p in self.yaml: + ## Check the platform name + try: + p["name"] + except: + print("At least one of the platforms is missing a name in "+fname+"\n") + raise + ## Check the compiler + try: + 
p["compiler"] + except: + print ("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") + raise + ## Check for the Fortran (fc) and C (cc) compilers + try: + p["fc"] + except: + print ("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") + raise + try: + p["cc"] + except: + print ("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") + raise + ## Check for modules to load + try: + p["modules"] + except: + p["modules"]=[""] + ## Check for modulesInit to set up the modules environment + try: + p["modulesInit"] + except: + p["modulesInit"]=[""] + ## Get the root for the build + try: + p["modelRoot"] + except: + p["modelRoot"] = "/apps" + ## Check if we are working with a container and get the info for that + try: + p["container"] + except: + p["container"] = False + p["RUNenv"] = "" + p["containerBuild"] = "" + p["containerRun"] = "" + if p["container"]: + ## Check the container builder + try: p["containerBuild"] - except: + except: print ("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") raise - if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": + if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": raise ValueError("Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") -## Check for container environment set up for RUN commands - try: + ## Check for container environment set up for RUN commands + try: p["RUNenv"] - except: + except: p["RUNenv"] = "" -## Check the container runner - try: + ## Check the container runner + try: p["containerRun"] - except: + except: print ("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n") raise - if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": + if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": raise ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") -## set the location of the mkTemplate. In a container, it uses the hpc-me template cloned from mkmf - p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk" - else: - try: + ## set the location of the mkTemplate. 
+ ## In a container, it uses the hpc-me template cloned from mkmf + p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk" + else: + try: p["mkTemplate"] - except: + except: raise ValueError("The non-container platform "+p["name"]+" must specify a mkTemplate \n") -## \brief Checks if the platform yaml has the named platform - def hasPlatform(self,name): - for p in self.yaml: - if p["name"] == name: - return True - return False -## \brief Get the platform yaml - def getPlatformsYaml(self): - return self.yaml -## \brief Get the platform information from the name of the platform - def getPlatformFromName(self,name): - for p in self.yaml: - if p["name"] == name: - return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"]) + def hasPlatform(self,name): + """ + Brief: Checks if the platform yaml has the named platform + """ + for p in self.yaml: + if p["name"] == name: + return True + return False + + def getPlatformsYaml(self): + """ + Brief: Get the platform yaml + """ + return self.yaml + + def getPlatformFromName(self,name): + """ + Brief: Get the platform information from the name of the platform + """ + for p in self.yaml: + if p["name"] == name: + return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"]) diff --git a/fre/make/gfdlfremake/targetfre.py b/fre/make/gfdlfremake/targetfre.py index b34e381f..0b2b5964 100644 --- a/fre/make/gfdlfremake/targetfre.py +++ b/fre/make/gfdlfremake/targetfre.py @@ -1,65 +1,81 @@ -## Stores information about the target class fretarget: -## Sets up information about the target and handles errors -## \note The default target is prod -## \param self the fretarget object -## \param t The target string - def __init__(self,t): - self.target = t # The target string -## Split the target string - targ = self.target.split('-') - self.makeline_add = "" - self.debug = False - self.repro = False - self.prod = False -## Parse the target string for prod, repro, and debug. 
Set up what to add to the -## make line during compile when using mkmf builds - for target in targ: - if target == "debug": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.debug = True - elif target == "prod": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.prod = True - elif target == "repro": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.repro = True -## Check to see if openmp is included in the target and add that to the makeline add string - if target == "openmp": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.openmp = True - else: - self.openmp = False -## Check to make sure only one of the prod, debug, repro are used - errormsg = "You can only list one mutually exclusive target, but your target '"+self.target+"' lists more than one of the following targets: \n debug \n prod \n repro" - if self.debug: - try: - if self.repro or self.prod == True: - raise ValueError(errormsg) - except ValueError: - raise - elif self.repro: - try: - if self.prod == True: - raise ValueError(errormsg) - except ValueError: - raise - else: - try: - if self.prod == False: - raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") - except ValueError: - raise -## Returns the name of the target -## \param self The fretarget object - def gettargetName(self): - return self.target -## Returns the makeline_add -## \param self The fretarget object - def getmakeline_add(self): - return self.makeline_add + """ + Class: Stores information about the target + """ + def __init__(self,t): + """ + Brief: Sets up information about the target and handles errors + Note: The default target is prod + Param: + - self the fretarget object + - t The target string + """ + self.target = t # The target string + ## Split the target string + targ = self.target.split('-') + self.makeline_add = "" + self.debug = False + self.repro = False + self.prod = False + + ## Parse the target string for prod, repro, and debug. 
Set up what to add to the + ## make line during compile when using mkmf builds + for target in targ: + if target == "debug": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.debug = True + elif target == "prod": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.prod = True + elif target == "repro": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.repro = True + + ## Check to see if openmp is included in the target and add that to the makeline add string + if target == "openmp": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.openmp = True + else: + self.openmp = False + + ## Check to make sure only one of the prod, debug, repro are used + errormsg = "You can only list one mutually exclusive target, but your target '"+self.target+"' lists more than one of the following targets: \n debug \n prod \n repro" + if self.debug: + try: + if self.repro or self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + elif self.repro: + try: + if self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + else: + try: + if self.prod == False: + raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") + except ValueError: + raise + + def gettargetName(self): + """ + Brief: Returns the name of the target + Param: + - self The fretarget object + """ + return self.target + + def getmakeline_add(self): + """ + Brief: Returns the makeline_add + Param: + - self The fretarget object + """ + return self.makeline_add diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 02b23812..7511fa91 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -1,27 +1,33 @@ import os -from pathlib import Path -import shutil -import yaml import json +import yaml from jsonschema import validate, ValidationError, SchemaError from . import platformfre -## Open the yaml file and parse as fremakeYaml -## \param fname the name of the yaml file to parse def parseCompile(fname): -# Open the yaml file and parse as fremakeYaml - with open(fname, 'r') as yamlfile: - y = yaml.safe_load(yamlfile) + """ + Brief: Open the yaml file and parse as fremakeYaml + Param: + - fname the name of the yaml file to parse + """ + # Open the yaml file and parse as fremakeYaml + with open(fname, 'r') as yamlfile: + y = yaml.safe_load(yamlfile) - return y + return y -## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will +## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will ## set a default value for the variable -## \param var A variable in the yaml -## \param val a default value for var -## \param req if true, the variable is required in the yaml and an exception will be raised -## \param err An error message to print if the variable is required and doesn't exist #def yamlVarCheck(var,val="",req=False,err="error"): +# """ +# Brief: Checks the yaml for variables. Required variables will dump and error. 
+# Non-required variables will set a default value for the variable +# Param: +# - var A variable in the yaml +# - val a default value for var +# - req if true, the variable is required in the yaml and an exception will be raised +# - err An error message to print if the variable is required and doesn't exist +# """ # try: # var # except: @@ -31,113 +37,124 @@ def parseCompile(fname): # else: # var = val -## This will read the compile yaml for FRE and then fill in any of the missing non-required variables class compileYaml(): -## Read get the compile yaml and fill in the missing pieces -## \param self the compile Yaml object -## \yamlFile The path to the compile yaml file - def __init__(self,yamlFile): - self.yaml = yamlFile + """ + Brief: This will read the compile yaml for FRE and then fill in any of the missing non-required variables + """ + def __init__(self,yamlFile): + """ + Brief: Read get the compile yaml and fill in the missing pieces + Param: + - self the compile Yaml object + - yamlFile The path to the compile yaml file + """ + self.yaml = yamlFile - ## Check the yaml for required things - ## Check for required experiment name - try: - self.yaml["experiment"] - except: - print("You must set an experiment name to compile \n") - raise - ## Check for optional libraries and packages for linking in container - try: - self.yaml["container_addlibs"] - except: - self.yaml["container_addlibs"]="" - ## Check for optional libraries and packages for linking on bare-metal system - try: - self.yaml["baremetal_linkerflags"] - except: - self.yaml["baremetal_linkerflags"]="" - ## Check for required src - try: - self.yaml["src"] - except: - print("You must set a src to specify the sources in modelRoot/"+self.yaml["experiment"]+"\n") - raise - ## Loop through the src array - for c in self.yaml['src']: - ## Check for required componenet name - try: - c['component'] - except: - print("You must set the 'componet' name for each src component") - raise - ## Check for required repo url - try: - c['repo'] - except: - print("'repo' is missing from the component "+c['component']+" in "+self.yaml["experiment"]+"\n") - raise - # Check for optional branch. Otherwise set it to blank - try: - c['branch'] - except: - c['branch']="" - # Check for optional cppdefs. Otherwise set it to blank - try: - c['cppdefs'] - except: - c['cppdefs']="" - # Check for optional doF90Cpp. Otherwise set it to False - try: - c['doF90Cpp'] - except: - c['doF90Cpp']=False - # Check for optional additional instructions. Otherwise set it to blank - try: - c['additionalInstructions'] - except: - c['additionalInstructions']="" - # Check for optional paths. Otherwise set it to blank - try: - c['paths'] - except: - c['paths']=[c['component']] - # Check for optional requires. Otherwise set it to blank - try: - c['requires'] - except: - c['requires']=[] - # Check for optional overrides. Otherwise set it to blank - try: - c['makeOverrides'] - except: - c['makeOverrides']="" - # Check for optional flags. Otherwise set it to blank. 
- try: - c["otherFlags"] - except: - c["otherFlags"]="" + ## Check the yaml for required things + ## Check for required experiment name + try: + self.yaml["experiment"] + except: + print("You must set an experiment name to compile \n") + raise + ## Check for optional libraries and packages for linking in container + try: + self.yaml["container_addlibs"] + except: + self.yaml["container_addlibs"]="" + ## Check for optional libraries and packages for linking on bare-metal system + try: + self.yaml["baremetal_linkerflags"] + except: + self.yaml["baremetal_linkerflags"]="" + ## Check for required src + try: + self.yaml["src"] + except: + print("You must set a src to specify the sources in modelRoot/"+self.yaml["experiment"]+"\n") + raise + ## Loop through the src array + for c in self.yaml['src']: + ## Check for required componenet name + try: + c['component'] + except: + print("You must set the 'componet' name for each src component") + raise + ## Check for required repo url + try: + c['repo'] + except: + print("'repo' is missing from the component "+c['component']+" in "+self.yaml["experiment"]+"\n") + raise + # Check for optional branch. Otherwise set it to blank + try: + c['branch'] + except: + c['branch']="" + # Check for optional cppdefs. Otherwise set it to blank + try: + c['cppdefs'] + except: + c['cppdefs']="" + # Check for optional doF90Cpp. Otherwise set it to False + try: + c['doF90Cpp'] + except: + c['doF90Cpp']=False + # Check for optional additional instructions. Otherwise set it to blank + try: + c['additionalInstructions'] + except: + c['additionalInstructions']="" + # Check for optional paths. Otherwise set it to blank + try: + c['paths'] + except: + c['paths']=[c['component']] + # Check for optional requires. Otherwise set it to blank + try: + c['requires'] + except: + c['requires']=[] + # Check for optional overrides. Otherwise set it to blank + try: + c['makeOverrides'] + except: + c['makeOverrides']="" + # Check for optional flags. Otherwise set it to blank. 
+ try: + c["otherFlags"] + except: + c["otherFlags"]="" -## Returns the compile yaml - def getCompileYaml(self): - try: - self.yaml - except: - print ("You must initialize the compile YAML object before you try to get the yaml \n") - raise - return self.yaml - -######################################################################################################################### -## \description This will take the models yaml file which has a list of the sub yaml files and combine them into the -## full freyaml that can be used and checked -# platformYaml: platforms.yaml -# compileYaml: compile.yaml + def getCompileYaml(self): + """ + Brief: Returns the compile yaml + """ + try: + self.yaml + except: + print ("You must initialize the compile YAML object before you try to get the yaml \n") + raise + return self.yaml class freyaml(): -## \param self The freyaml object -## \param combinedyaml The name of the combined yaml file - def __init__(self,combinedyaml): - # Parse - self.combined = parseCompile(combinedyaml) + """ + Brief: This will take the models yaml file which has a list of the sub yaml files and combine them into the + Note: + - full freyaml that can be used and checked + - platformYaml: platforms.yaml + - compileYaml: compile.yaml + """ + def __init__(self,combinedyaml): + """ + Param: + - self The freyaml object + - combinedyaml The name of the combined yaml file + """ + # Parse + self.combined = parseCompile(combinedyaml) ## Validate the YAML # fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) @@ -145,23 +162,27 @@ def __init__(self,combinedyaml): # with open(schema_path, 'r') as f: # s = f.read() # schema = json.loads(s) -# validate(instance=self.combined,schema=schema) +# validate(instance=self.combined,schema=schema) # print("\nCOMBINED YAML VALID") - #get compile info - self.compiledict = self.combined.get("compile") - self.compile = compileYaml(self.compiledict) - self.compileyaml = self.compile.getCompileYaml() - - #get platform info - self.platformsdict = self.combined.get("platforms") - self.platforms = platformfre.platforms(self.platformsdict) - self.platformsyaml = self.platforms.getPlatformsYaml() + #get compile info + self.compiledict = self.combined.get("compile") + self.compile = compileYaml(self.compiledict) + self.compileyaml = self.compile.getCompileYaml() + + #get platform info + self.platformsdict = self.combined.get("platforms") + self.platforms = platformfre.platforms(self.platformsdict) + self.platformsyaml = self.platforms.getPlatformsYaml() -## Returns the compile yaml - def getCompileYaml(self): - return self.compileyaml + def getCompileYaml(self): + """ + Brief: Returns the compile yaml + """ + return self.compileyaml -## Returns the compile yaml - def getPlatformsYaml(self): - return self.platformsyaml + def getPlatformsYaml(self): + """ + Brief: Returns the compile yaml + """ + return self.platformsyaml From c9cf8064ded33b755b23c10c2caae17d0d772e62 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 30 Aug 2024 09:59:51 -0400 Subject: [PATCH 13/61] #141 Add freVars object - accidentally removed --- fre/make/createCheckout.py | 5 ++++- fre/make/createCompile.py | 5 ++++- fre/make/createDocker.py | 5 ++++- fre/make/createMakefile.py | 7 +++++-- fre/make/runFremake.py | 5 ++++- 5 files changed, 21 insertions(+), 6 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index 89a4f96f..b1ae35cf 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -34,8 +34,11 @@ def 
checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job plist = platform tlist = target + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml) + modelYaml = yamlfre.freyaml(yml,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index e8cd13d9..d9a8bb3a 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -30,8 +30,11 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver plist = platform tlist = target + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml) + modelYaml = yamlfre.freyaml(yml,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 355ce17e..91d0ca64 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -17,8 +17,11 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): name = experiment run = execute + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml) + modelYaml = yamlfre.freyaml(yml,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 36f5dd00..de36fcd5 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -14,9 +14,12 @@ def makefile_create(yamlfile,experiment,platform,target): tlist = target yml = yamlfile name = experiment - + + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml) + modelYaml = yamlfre.freyaml(yml,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 8ad9d52c..1fbe8ff4 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -42,8 +42,11 @@ def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_paral plist = platform tlist = target + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml) + modelYaml = yamlfre.freyaml(yml,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets From 96bfcf3381ff900bbd38098ff6cdfc933b4469d0 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 30 Aug 2024 10:00:59 -0400 Subject: [PATCH 14/61] #141 Use freVars object to parse and fill in missing info --- fre/make/gfdlfremake/platformfre.py | 7 ++-- fre/make/gfdlfremake/yamlfre.py | 52 ++++++++++++++++++----------- 2 files changed, 36 insertions(+), 23 deletions(-) diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index ee6d0e2c..7348f3a7 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -1,13 +1,14 @@ import yaml class platforms (): - def __init__(self,fname): + def __init__(self,platforminfo): """ Param: - self The platform yaml object - - fname The path to the platform yaml file + - platforminfo dictionary with platform information + from the combined yaml """ - self.yaml = fname + self.yaml = platforminfo ## Check the yaml for 
errors/omissions ## Loop through the platforms diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 7511fa91..0891cc9e 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -4,18 +4,20 @@ from jsonschema import validate, ValidationError, SchemaError from . import platformfre -def parseCompile(fname): +def parseCompile(fname,v): """ Brief: Open the yaml file and parse as fremakeYaml Param: - fname the name of the yaml file to parse + - v the FRE yaml variables """ # Open the yaml file and parse as fremakeYaml with open(fname, 'r') as yamlfile: - y = yaml.safe_load(yamlfile) + y = yaml.safe_load(v.freVarSub(yamlfile.read())) return y +##### THIS SEEMS UNUSED ## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will ## set a default value for the variable #def yamlVarCheck(var,val="",req=False,err="error"): @@ -41,14 +43,15 @@ class compileYaml(): """ Brief: This will read the compile yaml for FRE and then fill in any of the missing non-required variables """ - def __init__(self,yamlFile): + def __init__(self,compileinfo): """ Brief: Read get the compile yaml and fill in the missing pieces Param: - self the compile Yaml object - - yamlFile The path to the compile yaml file + - compileinfo dictionary with compile information from the combined yaml """ - self.yaml = yamlFile + # compile information from the combined yaml + self.yaml = compileinfo ## Check the yaml for required things ## Check for required experiment name @@ -141,40 +144,49 @@ def getCompileYaml(self): class freyaml(): """ - Brief: This will take the models yaml file which has a list of the sub yaml files and combine them into the + Brief: This will take the combined yaml file, parse information, and fill in missing variables + to make the full freyaml that can be used and checked Note: - - full freyaml that can be used and checked - platformYaml: platforms.yaml - compileYaml: compile.yaml """ - def __init__(self,combinedyaml): + def __init__(self,combinedyaml,v): """ Param: - self The freyaml object - combinedyaml The name of the combined yaml file + - v FRE yaml variables """ - # Parse - self.combined = parseCompile(combinedyaml) + self.combinedfile = combinedyaml -## Validate the YAML -# fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) -# schema_path = os.path.join(fremake_package_dir, 'schema.json') -# with open(schema_path, 'r') as f: -# s = f.read() -# schema = json.loads(s) -# validate(instance=self.combined,schema=schema) -# print("\nCOMBINED YAML VALID") + self.freyaml = parseCompile(self.combinedfile, v) #get compile info - self.compiledict = self.combined.get("compile") + self.compiledict = self.freyaml.get("compile") self.compile = compileYaml(self.compiledict) self.compileyaml = self.compile.getCompileYaml() + #self.freyaml.update(self.compileyaml) + #get platform info - self.platformsdict = self.combined.get("platforms") + self.platformsdict = self.freyaml.get("platforms") self.platforms = platformfre.platforms(self.platformsdict) self.platformsyaml = self.platforms.getPlatformsYaml() + #self.freyaml.update(self.platformsyaml) + +####TO-DO: CREATE A SCHEMA FOR THE COMBINED YAML (will apply to fre make and fre pp combiend yaml - same yaml) +## VALIDATION OF COMBINED YAML CAN ALSO HAPPEN IN FRE YAMLTOOLS COMBINE-YAML + +## Validate the YAML +# fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) +# schema_path = os.path.join(fremake_package_dir, 'schema.json') +# with 
open(schema_path, 'r') as f: +# s = f.read() +# schema = json.loads(s) +# validate(instance=self.combined,schema=schema) +# print("\nCOMBINED YAML VALID") + def getCompileYaml(self): """ Brief: Returns the compile yaml From cf8c89c17941d3d70f28919741a4354334d30107 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 30 Aug 2024 10:16:44 -0400 Subject: [PATCH 15/61] #141 Update README --- fre/make/README.md | 64 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 19 deletions(-) diff --git a/fre/make/README.md b/fre/make/README.md index bb1215cf..6182e677 100644 --- a/fre/make/README.md +++ b/fre/make/README.md @@ -69,71 +69,98 @@ Additionally, as mentioned, multiple targets can be used more multiple target-pl - `-n, --parallel [number of concurrent modile compiles]` ## Guide +In order to use the `fre make` tools, remember to create a combined yaml first. This can be done with the `fre yamltools combine-yamls` tool. This combines the model, compile, platform, experiment, and any analysis yamls into ONE yaml file for parsing and validation. + +To combine: +`fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target]` + ### **Bare-metal Build:** ```bash +## NOTE: Remember to create the combined yaml first! +## The targets used in fremake are taken from the fre make command itself +# Create combined yaml +fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target] + # Create checkout script -fre make create-checkout -y [experiment yaml file] -p [platform] -t [target] +fre make create-checkout -y [combined yaml file] -e [experiment name] -p [platform] -t [target] # Create and run checkout script -fre make create-checkout -y [experiment yaml file] -p [platform] -t [target] -e +fre make create-checkout -y [combined yaml file] -e [experiment name] -p [platform] -t [target] --execute # Create Makefile -fre make create-makefile -y [experiment yaml file] -p [platform] -t [target] +fre make create-makefile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] # Creat the compile script -fre make create-compile -y [experiment yaml file] -p [platform] -t [target] +fre make create-compile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] # Create and run the compile script -fre make create-compile -y [experiment yaml file] -p [platform] -t [target] -e +fre make create-compile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] --execute # Run all of fremake -fre make run-fremake -y [experiment yaml] -p [platform] -t [target] [other options...] +fre make run-fremake -y [combined yaml] -e [experiment name] -p [platform] -t [target] [other options...] ``` ### **Bare-metal Build (Multi-target example):** ```bash +## NOTE: Remember to create the combined yaml first! 
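+## The combine step writes a file named combined-<experiment name>.yaml in the
+## current working directory; that file is then passed to -y in the fre make commands below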
+## The targets used in fremake are taken from the fre make command itself +# Create combined yaml +fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t debug + # Create checkout script -fre make create-checkout -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug +fre make create-checkout -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug # Create and run checkout script -fre make create-checkout -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug -e +fre make create-checkout -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug --execute # Create Makefile -fre make create-makefile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug +fre make create-makefile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug # Creat the compile script -fre make create-compile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug +fre make create-compile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug # Create and run the compile script -fre make create-compile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug -e +fre make create-compile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug --execute # Run all of fremake -fre make run-fremake -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug [other options...] +fre make run-fremake -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug ``` ### **Container Build:** For the container build, parallel checkouts are not supported, so the `-npc` options must be used for the checkout script. In addition the platform must be a container platform. ***To reiterate, users will not be able to create containers unless they have podman access on gaea.*** ```bash +## NOTE: Remember to create the combined yaml first! 
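+## If combined-<experiment name>.yaml is already present in the working directory, the
+## fre make steps below reuse it and print "NOTE: Yamls previously merged."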
+## The targets used in fremake are taken from the fre make command itself +# Create combined yaml +fre yamltools combine-yamls -y [model yaml] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] + # Create checkout script -fre make create-checkout -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] -npc +fre make create-checkout -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] -npc # Create and run checkout script -fre make create-checkout -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] -e -npc +fre make create-checkout -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] --execute -npc # Create Makefile -fre make create-makefile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] +fre make create-makefile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] # Create the compile script -fre make create-compile -y [experiment yaml file] -p [CONTAINER PLATFORM]-t [target] +fre make create-compile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM]-t [target] + +# Create and run the compile script +fre make create-compile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM]-t [target] --execute #Create a Dockerfile -fre make create-dockerfile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] +fre make create-dockerfile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] # Create and run the Dockerfile -fre make create-dockerfile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] +fre make create-dockerfile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] --execute ``` ### **Container Build (Multi-target example):** ```bash +# NOTE: multi-target will be taken from fre make commands +# Create combined yaml +fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p hpcme.2023 -t debug + # Create checkout script fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -npc @@ -152,4 +179,3 @@ fre make create-compile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -e # Run all of fremake fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug [other options...] 
-npc ``` - From 36860a996923f41d3676f147e473316732e7382e Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 09:56:11 -0400 Subject: [PATCH 16/61] #141 Update schema for combined yaml --- fre/make/gfdlfremake/schema.json | 63 ++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json index 7e460d6f..2ab69ba6 100644 --- a/fre/make/gfdlfremake/schema.json +++ b/fre/make/gfdlfremake/schema.json @@ -5,31 +5,23 @@ "description": "A yaml base for use with fre canopy", "type": "object", "properties": { - "platformYaml": { - "description": "Path to the platform yaml file", - "type": "string" - }, - "compileYaml": { - "description": "Path to the compile yaml file", - "type": "string" + "name": { + "description": "The name of the experiment", + "type": "string" }, - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs":{ - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] + "platform": { + "description": "The platforms listed in the command", + "type": ["string","array"] }, - "baremetal_linkerflags":{ - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] + "target": { + "description": "The targets listed in the command", + "type": ["string","array"] + }, + "compile": { + "description": "The source code descriptions", + "type": "object", + "items":{"$ref": "#/$defs/comp" } }, - "src":{ - "description": "The source code descriptions", - "type": "array", - "items":{"$ref": "#/$defs/comp" } - }, "platforms":{ "description": "FRE platforms", "type": "array", @@ -39,8 +31,31 @@ "$defs":{ "comp": { "type": "object", - "required": ["component", "repo"], + "required": ["experiment","src"], "properties":{ + "experiment": { + "description": "The name of the model", + "type": "string" + }, + "container_addlibs":{ + "description": "Libraries and packages needed for linking in the container", + "type": ["array","string","null"] + }, + "baremetal_linkerflags":{ + "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", + "type": ["array","string","null"] + }, + "src":{ + "description": "The source code descriptions", + "type": "array", + "items":{"$ref": "#/$defs/source" } + } + } + }, + "source": { + "type": "object", + "required": ["component", "repo"], + "properties":{ "component": { "type": "string", "description": "The name of the model component" @@ -159,6 +174,6 @@ "additionalProperties": false } }, - "required": ["experiment", "src", "platforms"], + "required": ["platforms"], "additionalProperties": true } From 309b59f70ca3d1f2cda6e9b669adceb636261998 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 09:59:24 -0400 Subject: [PATCH 17/61] #141 Reference class or use of combine-yamls tool - combine-yamls here if tool wasn't used --- fre/make/createCheckout.py | 30 ++++++++++++++++++++++++++++-- fre/make/createCompile.py | 30 +++++++++++++++++++++++++++--- fre/make/createDocker.py | 33 +++++++++++++++++++++++++++++---- fre/make/createMakefile.py | 30 ++++++++++++++++++++++++++++-- 4 files changed, 112 insertions(+), 11 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index b1ae35cf..dd8a7f99 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -5,8 +5,14 @@ import logging import sys import 
click +from pathlib import Path from .gfdlfremake import varsfre, platformfre, yamlfre, checkout, targetfre +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy + @click.command() def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,jobs,execute,verbose): # Define variables @@ -34,11 +40,31 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job plist = platform tlist = target + ## If combined yaml does not exists, combine model, compile, and platform yamls + cd = Path.cwd() + combined = Path(f"combined-{name}.yaml") + combined_path=os.path.join(cd,combined) + + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists: + ## Make sure that the previously created combined yaml is valid + yamlfre.validate_yaml(combined_path) + + full_combined = combined_path + + else: + ## Combine yaml files to parse + comb = cy.init_compile_yaml(yml,experiment,platform,target) + comb_yaml = comb.combine_model() + comb_compile = comb.combine_compile() + comb_platform = comb.combine_platforms() + full_combined = comb.clean_yaml() + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index d9a8bb3a..01ca0110 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -3,11 +3,16 @@ import os import sys import logging +from pathlib import Path from multiprocessing.dummy import Pool - import click from .gfdlfremake import varsfre, platformfre, yamlfre, targetfre, buildBaremetal +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy + @click.command() def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,verbose): # Define variables @@ -30,11 +35,30 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver plist = platform tlist = target + ## If combined yaml does not exist, combine model, compile, and platform yamls + cd = Path.cwd() + combined = Path(f"combined-{name}.yaml") + combined_path=os.path.join(cd,combined) + + if Path(combined_path).exists: + ## Make sure that the previously created combined yaml is valid + yamlfre.validate_yaml(combined_path) + + full_combined = combined_path + + else: + ## Combine yaml files to parse + comb = cy.init_compile_yaml(yml,experiment,platform,target) + comb_yaml = comb.combine_model() + comb_compile = comb.combine_compile() + comb_platform = comb.combine_platforms() + full_combined = comb.clean_yaml() + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 91d0ca64..1458d3e9 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -2,9 +2,15 @@ import os import sys +from pathlib import Path import click from .gfdlfremake import varsfre, 
targetfre, makefilefre, platformfre, yamlfre, buildDocker +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy + @click.command() def dockerfile_create(yamlfile, experiment, platform, target, execute): srcDir="src" @@ -17,11 +23,30 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): name = experiment run = execute + ## If combined yaml does not exist, combine model, compile, and platform yamls + cd = Path.cwd() + combined = Path(f"combined-{name}.yaml") + combined_path=os.path.join(cd,combined) + + if Path(combined_path).exists: + ## Make sure that the previously created combined yaml is valid + yamlfre.validate_yaml(combined_path) + + full_combined = combined_path + + else: + ## Combine yaml files to parse + comb = cy.init_compile_yaml(yml,experiment,platform,target) + comb_yaml = comb.combine_model() + comb_compile = comb.combine_compile() + comb_platform = comb.combine_platforms() + full_combined = comb.clean_yaml() + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] @@ -33,7 +58,7 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): pass else: raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) - + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target @@ -68,7 +93,7 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): dockerBuild.writeDockerfileMkmf(c) dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - + ah0 currDir = os.getcwd() click.echo("\ntmpDir created in " + currDir + "/tmp") click.echo("Dockerfile created in " + currDir +"\n") diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index de36fcd5..2e797dbf 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -1,9 +1,16 @@ #!/usr/bin/python3 import os +import sys +from pathlib import Path import click from .gfdlfremake import makefilefre, varsfre, targetfre, yamlfre +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy + @click.command() def makefile_create(yamlfile,experiment,platform,target): srcDir="src" @@ -15,11 +22,30 @@ def makefile_create(yamlfile,experiment,platform,target): yml = yamlfile name = experiment + ## If combined yaml does not exist, combine model, compile, and platform yamls + cd = Path.cwd() + combined = Path(f"combined-{name}.yaml") + combined_path=os.path.join(cd,combined) + + if Path(combined_path).exists: + ## Make sure that the previously created combined yaml is valid + yamlfre.validate_yaml(combined_path) + + full_combined = combined_path + + else: + ## Combine yaml files to parse + comb = cy.init_compile_yaml(yml,experiment,platform,target) + comb_yaml = comb.combine_model() + comb_compile = comb.combine_compile() + comb_platform = comb.combine_platforms() + full_combined = comb.clean_yaml() + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = 
varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] From be9e821d253387afe38a3d06a65573d1dc64931b Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 10:00:21 -0400 Subject: [PATCH 18/61] #141 Add validation of yaml --- fre/make/gfdlfremake/yamlfre.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 0891cc9e..9a54f73a 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -17,6 +17,20 @@ def parseCompile(fname,v): return y +## VALIDATION OF COMBINED YAML CAN ALSO HAPPEN IN FRE YAMLTOOLS COMBINE-YAML +def validate_yaml(yamlfile): + with open(yamlfile,'r') as yf: + cy = yaml.safe_load(yf) + ## Validate the YAML + fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) + schema_path = os.path.join(fremake_package_dir, 'schema.json') + with open(schema_path, 'r') as f: + s = f.read() + schema = json.loads(s) + + validate(instance=cy,schema=schema) + print("\nCOMBINED YAML VALID") + ##### THIS SEEMS UNUSED ## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will ## set a default value for the variable @@ -175,18 +189,6 @@ def __init__(self,combinedyaml,v): #self.freyaml.update(self.platformsyaml) -####TO-DO: CREATE A SCHEMA FOR THE COMBINED YAML (will apply to fre make and fre pp combiend yaml - same yaml) -## VALIDATION OF COMBINED YAML CAN ALSO HAPPEN IN FRE YAMLTOOLS COMBINE-YAML - -## Validate the YAML -# fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) -# schema_path = os.path.join(fremake_package_dir, 'schema.json') -# with open(schema_path, 'r') as f: -# s = f.read() -# schema = json.loads(s) -# validate(instance=self.combined,schema=schema) -# print("\nCOMBINED YAML VALID") - def getCompileYaml(self): """ Brief: Returns the compile yaml From 999d70d5fa680b4ddf956011fac9b98fb0267e3b Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 10:38:42 -0400 Subject: [PATCH 19/61] #141 Update example yamls --- fre/make/tests/AM5_example/am5.yaml | 33 +++++++------------ .../AM5_example/yaml_include/platforms.yaml | 4 +-- 2 files changed, 13 insertions(+), 24 deletions(-) diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml index db6bcd91..20dee565 100644 --- a/fre/make/tests/AM5_example/am5.yaml +++ b/fre/make/tests/AM5_example/am5.yaml @@ -70,57 +70,46 @@ experiments: - name: "c96L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c96_amip.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c96_clim.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c384L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c384_amip.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: - - 
*compile_yaml + compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c384_clim.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c384_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c96_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: - - *compile_yaml + compile: *compile_yaml - name: "c96L65_am5f7b12r1_amip_cosp" pp: - "yaml_include/pp.c96_amip.yaml" - compile: - - *compile_yaml + compile: *compile_yaml diff --git a/fre/make/tests/AM5_example/yaml_include/platforms.yaml b/fre/make/tests/AM5_example/yaml_include/platforms.yaml index 7e1b9f49..60d1aad2 100644 --- a/fre/make/tests/AM5_example/yaml_include/platforms.yaml +++ b/fre/make/tests/AM5_example/yaml_include/platforms.yaml @@ -5,7 +5,7 @@ platforms: modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] modelRoot: ${HOME}/fremake_canopy/test - name: ncrc5.intel23 compiler: intel @@ -13,7 +13,7 @@ platforms: modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel From f553b20ca0f311f7000537463db60d85d08e347a Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 11:08:29 -0400 Subject: [PATCH 20/61] #141 Add yaml validation --- fre/make/createCheckout.py | 5 +++-- fre/make/createCompile.py | 2 ++ fre/make/createDocker.py | 2 ++ fre/make/createMakefile.py | 2 ++ 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index dd8a7f99..7c7ca760 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -46,10 +46,9 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job combined_path=os.path.join(cd,combined) # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists: + if Path(combined_path).exists(): ## Make sure that the previously created combined yaml is valid yamlfre.validate_yaml(combined_path) - full_combined = combined_path else: @@ -59,6 +58,8 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() + # Validate the yaml + yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 01ca0110..12a5381b 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -53,6 +53,8 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() + # Validate the yaml + yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = 
varsfre.frevars(full_combined) diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 1458d3e9..bb621451 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -41,6 +41,8 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() + # Validate the yaml + yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 2e797dbf..3f8d0c8b 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -40,6 +40,8 @@ def makefile_create(yamlfile,experiment,platform,target): comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() + # Validate the yaml + yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) From 86cbc87e0d31d53bf774ea8838e269265ca41254 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 11:10:01 -0400 Subject: [PATCH 21/61] #141 Fix combining yamls and validation - reflects changes in `fre yamltools combine-yamls` - references defined `init_pp_yamls` class in `combine-yamls` to make combining a default behavior (on top of tool still being available if user wants to combine separately) - update schema.json for pp combined yaml --- fre/pp/configure_script_yaml.py | 63 ++++++++++++++----- fre/pp/schema.json | 108 ++++++++++++++++++-------------- 2 files changed, 107 insertions(+), 64 deletions(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index a664cef1..31746223 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -10,27 +10,18 @@ import os import json +import sys import shutil import click +from pathlib import Path from jsonschema import validate import yaml import metomi.rose.config -######VALIDATE##### -package_dir = os.path.dirname(os.path.abspath(__file__)) -schema_path = os.path.join(package_dir, 'schema.json') -def validate_yaml(file): - """ - Using the schema.json file, the yaml format is validated. - """ - # Load the json schema: .load() (vs .loads()) reads and parses the json in one - with open(schema_path) as s: - schema = json.load(s) - - # Validate yaml - # If the yaml is not valid, the schema validation will raise errors and exit - if validate(instance=file,schema=schema) is None: - print("YAML VALID") +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy #################### def yaml_load(yamlfile): @@ -43,6 +34,25 @@ def yaml_load(yamlfile): return y +######VALIDATE##### +def validate_yaml(yamlfile): + """ + Using the schema.json file, the yaml format is validated. 
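+    Param:
+        - yamlfile the path to the combined yaml file being validated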
+ """ + # Load the yaml + yml = yaml_load(yamlfile) + + package_dir = os.path.dirname(os.path.abspath(__file__)) + schema_path = os.path.join(package_dir, 'schema.json') + # Load the json schema: .load() (vs .loads()) reads and parses the json in one + with open(schema_path,'r') as s: + schema = json.load(s) + + # Validate yaml + # If the yaml is not valid, the schema validation will raise errors and exit + if validate(instance=yml,schema=schema) is None: + print("YAML VALID") + #################### def rose_init(experiment,platform,target): """ @@ -152,8 +162,29 @@ def _yamlInfo(yamlfile,experiment,platform,target): # Initialize the rose configurations rose_suite,rose_regrid,rose_remap = rose_init(e,p,t) + ## If combined yaml does not exists, combine model, experiment, and analysis yamls + cd = Path.cwd() + combined = Path(f"combined-{e}.yaml") + combined_path=os.path.join(cd,combined) + + # If fre yamltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): + ## Make sure that the previously created combined yaml is valid + validate_yaml(str(combined_path)) + full_combined = combined_path + else: + ## Combine yaml files to parse + comb = cy.init_pp_yaml(yml,e,p,t) + comb_model = comb.combine_model() + comb_exp = comb.combine_experiment() + comb_analysis = comb.combine_analysis() + full_combined = comb.clean_yaml() + + # Validate yaml + validate_yaml(full_combined) + # Load the combined yaml - comb_pp_yaml = yaml_load(yml) + comb_pp_yaml = yaml_load(full_combined) ## PARSE COMBINED YAML TO CREATE CONFIGS # Set rose-suite items diff --git a/fre/pp/schema.json b/fre/pp/schema.json index e69b4b3d..dfb9cff5 100644 --- a/fre/pp/schema.json +++ b/fre/pp/schema.json @@ -7,57 +7,69 @@ "platform": {"type": "string"}, "target": {"type": "string"}, "directories": { - "history_dir": {"type":"string"}, - "pp_dir": {"type":"string"}, - "ptmp_dir": {"type":"string"}, - "refinediag_scripts":{"type":["string","null"]}, - "preanalysis_script":{"type":["string","null"]}, - "history_refined":{"type":["string","null"]}, - "analysis":{"type":["string","null"]}, - "pp_grid_spec": {"type":"string"}, - "fre_analysis_home": {"type":["string","null"]} + "description": "FRE shared directories", + "type": "object", + "items":{"$ref": "#/$defs/dirs" } }, "postprocess": { + "description": "FRE post-processing information", "type": "object", - "properties": { - "settings": { - "type:": "object", - "properties": { - "history_segment": {"type":"string"}, - "site": {"type":"string"}, - "pp_chunk_a": {"type":"string"}, - "pp_chunk_b": {"type":"string"}, - "pp_start": {"type":"string"}, - "pp_stop": {"type":"string"}, - "pp_components": {"type":"string"} - } - }, - "switches": { - "type": "object", - "properties": { - "clean_work": {"type":"boolean"}, - "do_mdtf": {"type":"boolean"}, - "do_statics": {"type":"boolean"}, - "do_timeavgs": {"type":"boolean"}, - "do_refinediag": {"type":"boolean"}, - "do_atmos_plevel_masking": {"type":"boolean"}, - "do_preanalysis": {"type":"boolean"}, - "do_analysis": {"type":"boolean"}, - "do_analysis_only": {"type":"boolean"} - } - }, - "components": { - "type": "array", - "properties": { - "type": {"type":"string"}, - "sources": {"type":"string"}, - "sourceGrid": {"type":"string"}, - "xyInterp": {"type":"string"}, - "interpMethod": {"type":"string"}, - "inputRealm": {"type":"string"} - } - } - } + "items":{"$ref": "#/$defs/pp" } } + }, + "$defs": { + "dirs": { + "history_dir": {"type":"string"}, + "pp_dir": {"type":"string"}, + "ptmp_dir": 
{"type":"string"}, + "refinediag_scripts":{"type":["string","null"]}, + "preanalysis_script":{"type":["string","null"]}, + "history_refined":{"type":["string","null"]}, + "analysis_dir":{"type":["string","null"]}, + "pp_grid_spec": {"type":"string"}, + "fre_analysis_home": {"type":["string","null"]} + }, + "pp": { + "type": "object", + "properties": { + "settings": { + "type:": "object", + "properties": { + "history_segment": {"type":"string"}, + "site": {"type":"string"}, + "pp_chunk_a": {"type":"string"}, + "pp_chunk_b": {"type":"string"}, + "pp_start": {"type":"string"}, + "pp_stop": {"type":"string"}, + "pp_components": {"type":"string"} + } + }, + "switches": { + "type": "object", + "properties": { + "clean_work": {"type":"boolean"}, + "do_mdtf": {"type":"boolean"}, + "do_statics": {"type":"boolean"}, + "do_timeavgs": {"type":"boolean"}, + "do_refinediag": {"type":"boolean"}, + "do_atmos_plevel_masking": {"type":"boolean"}, + "do_preanalysis": {"type":"boolean"}, + "do_analysis": {"type":"boolean"}, + "do_analysis_only": {"type":"boolean"} + } + }, + "components": { + "type": "array", + "properties": { + "type": {"type":"string"}, + "sources": {"type":"string"}, + "sourceGrid": {"type":"string"}, + "xyInterp": {"type":"string"}, + "interpMethod": {"type":"string"}, + "inputRealm": {"type":"string"} + } + } + } + } } } From d36233f98693e6e47b333b993115cd2f62cd8eb2 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 12:30:19 -0400 Subject: [PATCH 22/61] #141 Update messages and where validation happens --- fre/make/createCheckout.py | 8 ++------ fre/make/createCompile.py | 7 +------ fre/make/createDocker.py | 8 +------- fre/make/createMakefile.py | 7 +------ fre/make/gfdlfremake/yamlfre.py | 24 ++++++++++-------------- 5 files changed, 15 insertions(+), 39 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index 7c7ca760..c3e16ef4 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -47,10 +47,8 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job # If fre yammltools combine-yamls tools was used, the combined yaml should exist if Path(combined_path).exists(): - ## Make sure that the previously created combined yaml is valid - yamlfre.validate_yaml(combined_path) full_combined = combined_path - + print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,experiment,platform,target) @@ -58,13 +56,11 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() - # Validate the yaml - yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) - ## Open the yaml file and parse as fremakeYaml + ## Open the yaml file, validate the yaml, and parse as fremakeYaml modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 12a5381b..a4b32b49 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -41,11 +41,8 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver combined_path=os.path.join(cd,combined) if Path(combined_path).exists: - ## Make sure that the previously created combined yaml is valid - yamlfre.validate_yaml(combined_path) - full_combined = combined_path - + print("\nNOTE: Yamls previously 
merged.") else: ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,experiment,platform,target) @@ -53,8 +50,6 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() - # Validate the yaml - yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index bb621451..f359dce3 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -29,11 +29,8 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): combined_path=os.path.join(cd,combined) if Path(combined_path).exists: - ## Make sure that the previously created combined yaml is valid - yamlfre.validate_yaml(combined_path) - full_combined = combined_path - + print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,experiment,platform,target) @@ -41,8 +38,6 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() - # Validate the yaml - yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -95,7 +90,6 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): dockerBuild.writeDockerfileMkmf(c) dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - ah0 currDir = os.getcwd() click.echo("\ntmpDir created in " + currDir + "/tmp") click.echo("Dockerfile created in " + currDir +"\n") diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 3f8d0c8b..1ca3cc10 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -28,11 +28,8 @@ def makefile_create(yamlfile,experiment,platform,target): combined_path=os.path.join(cd,combined) if Path(combined_path).exists: - ## Make sure that the previously created combined yaml is valid - yamlfre.validate_yaml(combined_path) - full_combined = combined_path - + print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,experiment,platform,target) @@ -40,8 +37,6 @@ def makefile_create(yamlfile,experiment,platform,target): comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() - # Validate the yaml - yamlfre.validate_yaml(full_combined) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 9a54f73a..6f638bbb 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -17,20 +17,6 @@ def parseCompile(fname,v): return y -## VALIDATION OF COMBINED YAML CAN ALSO HAPPEN IN FRE YAMLTOOLS COMBINE-YAML -def validate_yaml(yamlfile): - with open(yamlfile,'r') as yf: - cy = yaml.safe_load(yf) - ## Validate the YAML - fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) - schema_path = os.path.join(fremake_package_dir, 'schema.json') - with open(schema_path, 'r') as f: - s = f.read() - schema = json.loads(s) - - validate(instance=cy,schema=schema) - print("\nCOMBINED YAML VALID") - ##### THIS SEEMS UNUSED ## \brief Checks the yaml for variables. Required variables will dump and error. 
Non-required variables will ## set a default value for the variable @@ -189,6 +175,16 @@ def __init__(self,combinedyaml,v): #self.freyaml.update(self.platformsyaml) + ## VALIDATION OF COMBINED YAML FOR COMPILATION + fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) + schema_path = os.path.join(fremake_package_dir, 'schema.json') + with open(schema_path, 'r') as f: + s = f.read() + schema = json.loads(s) + + validate(instance=self.freyaml,schema=schema) + print("\nCOMBINED YAML VALID") + def getCompileYaml(self): """ Brief: Returns the compile yaml From ac188521f4429fe2f032b12efe71b96e43b3c2f9 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 12:33:18 -0400 Subject: [PATCH 23/61] #141 Fix where validation happens --- fre/pp/configure_script_yaml.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 31746223..37ac0eeb 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -51,7 +51,7 @@ def validate_yaml(yamlfile): # Validate yaml # If the yaml is not valid, the schema validation will raise errors and exit if validate(instance=yml,schema=schema) is None: - print("YAML VALID") + print("COMBINED YAML VALID \n") #################### def rose_init(experiment,platform,target): @@ -170,8 +170,8 @@ def _yamlInfo(yamlfile,experiment,platform,target): # If fre yamltools combine-yamls tools was used, the combined yaml should exist if Path(combined_path).exists(): ## Make sure that the previously created combined yaml is valid - validate_yaml(str(combined_path)) full_combined = combined_path + print("\nNOTE: Yamls merged from combine-yamls tool") else: ## Combine yaml files to parse comb = cy.init_pp_yaml(yml,e,p,t) @@ -180,8 +180,8 @@ def _yamlInfo(yamlfile,experiment,platform,target): comb_analysis = comb.combine_analysis() full_combined = comb.clean_yaml() - # Validate yaml - validate_yaml(full_combined) + # Validate yaml + validate_yaml(full_combined) # Load the combined yaml comb_pp_yaml = yaml_load(full_combined) From 569de41bfa8ca52804ebc85796a38aa5eaf4bd60 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 12:35:54 -0400 Subject: [PATCH 24/61] #141 Create classes for combining yamls - classes were created so combining yamls can be done if user doesn't want to use `combine-yamls` tool - still keeps option to use `combine-yamls` as a separate tool - class for combining compile yamls - class for combining pp yamls --- fre/yamltools/combine_yamls.py | 258 +++++++++++++++++++++++---------- 1 file changed, 183 insertions(+), 75 deletions(-) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 3d0fdc01..a4047ed5 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -15,7 +15,6 @@ import shutil from pathlib import Path import click -from jsonschema import validate import yaml def join_constructor(loader, node): @@ -35,32 +34,32 @@ def yaml_load(yamlfile): return y -def combine_model(modelyaml,combined,experiment,platform,target): - """ - Create the combined.yaml and merge it with the model yaml - Arguments: - modelyaml : model yaml file - combined : final combined file name - experiment : experiment name - platform : platform used - target : targets used - """ - # copy model yaml info into combined yaml - with open(combined,'w+',encoding='UTF-8') as f1: - f1.write(f'name: &name "{experiment}"\n') - f1.write(f'platform: &platform "{platform}"\n') - f1.write(f'target: &target 
"{target}"\n\n') - with open(modelyaml,'r',encoding='UTF-8') as f2: - f1.write("### MODEL YAML SETTINGS ###\n") - shutil.copyfileobj(f2,f1) - print(f" model yaml: {modelyaml}") +#def combine_model(modelyaml,combined):#,experiment,platform,target): +# """ +# Create the combined.yaml and merge it with the model yaml +# Arguments: +# modelyaml : model yaml file +# combined : final combined file name +# experiment : experiment name +# platform : platform used +# target : targets used +# """ +# # copy model yaml info into combined yaml +# with open(combined,'w+',encoding='UTF-8') as f1: +# f1.write(f'name: &name "{experiment}"\n') +# f1.write(f'platform: &platform "{platform}"\n') +# f1.write(f'target: &target "{target}"\n\n') +# with open(modelyaml,'r',encoding='UTF-8') as f2: +# f1.write("### MODEL YAML SETTINGS ###\n") +# shutil.copyfileobj(f2,f1) +# print(f" model yaml: {modelyaml}") def experiment_check(mainyaml_dir,comb,experiment): """ Check that the experiment given is an experiment listed in the model yaml. Extract experiment specific information and file paths. Arguments: - mainyaml_dir : model yaml file + mainyaml_dir : model yaml file comb : combined yaml file name experiment : experiment name """ @@ -112,101 +111,210 @@ def experiment_check(mainyaml_dir,comb,experiment): return (py_path,cy_path,ey_path,ay_path) -def combine_compile(comb_m,compileyaml): +###### MAIN ##### +class init_compile_yaml(): + def __init__(self,yamlfile,experiment,platform,target): + """ + Process to combine yamls appllicable to compilation + """ + self.yml = yamlfile + self.name = experiment + self.platform = platform + self.target = target + + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + self.combined=f"combined-{self.name}.yaml" + + print("Combining yaml files: ") + + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + Arguments: + modelyaml : model yaml file + combined : final combined file name + experiment : experiment name + platform : platform used + target : targets used + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + + print(f" model yaml: {self.yml}") + + def combine_compile(self): """ Combine compile yaml with the defined combined.yaml Arguments: comb_m : combined model yaml file - compileyaml : compile yaml file + compileyaml : compile yaml file """ - combined = comb_m + # Experiment Check + (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) # copy compile yaml info into combined yaml - if compileyaml is not None: - with open(combined,'a',encoding='UTF-8') as f1: - with open(compileyaml,'r',encoding='UTF-8') as f2: + if cy_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(cy_path,'r',encoding='UTF-8') as f2: f1.write("\n### COMPILE INFO ###\n") shutil.copyfileobj(f2,f1) - print(f" compile yaml: {compileyaml}") + print(f" compile yaml: {cy_path}") -def combine_platforms(comb_mc,platformsyaml): + def combine_platforms(self): """ Combine platforms yaml with the defined combined.yaml Arguments: comb_mc : combined 
model and compile yaml file platformsyaml : platforms yaml file """ - combined = comb_mc + # Experiment Check + (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + # combine platform yaml - if platformsyaml is not None: - with open(combined,'a',encoding='UTF-8') as f1: - with open(platformsyaml,'r',encoding='UTF-8') as f2: + if py_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(py_path,'r',encoding='UTF-8') as f2: f1.write("\n### PLATFORM INFO ###\n") shutil.copyfileobj(f2,f1) - print(f" platforms yaml: {platformsyaml}") + print(f" platforms yaml: {py_path}") -def combine_experiments(comb_mcp,expyaml): + def clean_yaml(self): + """ + """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) + + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] + + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(full_yaml,f,sort_keys=False) + + print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") + return self.combined + +class init_pp_yaml(): + def __init__(self,yamlfile,experiment,platform,target): + """ + Process to combine the applicable yamls for post-processing + """ + self.yml = yamlfile + self.name = experiment + self.platform = platform + self.target = target + + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + self.combined=f"combined-{self.name}.yaml" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + Arguments: + modelyaml : model yaml file + combined : final combined file name + experiment : experiment name + platform : platform used + target : targets used + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + + print(f" model yaml: {self.yml}") + + def combine_experiment(self): """ Combine experiment yamls with the defined combined.yaml Arguments: comb_mcp : combined model, compile, and platforms yaml file expyaml : experiment yaml files """ - combined = comb_mcp + # Experiment Check + (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + ## COMBINE EXPERIMENT YAML INFO - if expyaml is not None: - for i in expyaml: + if ey_path is not None: + for i in ey_path: #expyaml_path = os.path.join(mainyaml_dir, i) - with open(combined,'a',encoding='UTF-8') as f1: + with open(self.combined,'a',encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) print(f" experiment yaml: {i}") -def combine_analysis(comb_mcpe,analysisyaml): + def combine_analysis(self): """ Combine analysis yamls with the defined combined.yaml Arguments: comb_mcpe : combined model, compile, platforms, and experiment yaml file analysisyaml : analysis yaml file """ - combined = comb_mcpe + # Experiment Check + (py_path,cy_path,ey_path,ay_path) = 
experiment_check(self.mainyaml_dir,self.combined,self.name) ## COMBINE EXPERIMENT YAML INFO - if analysisyaml is not None: - for i in analysisyaml: + if ay_path is not None: + for i in ay_path: #analysisyaml_path = os.path.join(mainyaml_dir, i) - with open(combined,'a',encoding='UTF-8') as f1: + with open(self.combined,'a',encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) print(f" analysis yaml: {i}") -###### VALIDATE ##### FIX VALIDATION ##### -package_dir = os.path.dirname(os.path.abspath(__file__)) -schema_path = os.path.join(package_dir, 'schema.json') -def validate_yaml(file): - """ - Using the schema.json file, the yaml format is validated. - Arguments: - file : combined yaml file - """ - # Load the json schema: .load() (vs .loads()) reads and parses the json in one - with open(schema_path) as s: - schema = json.load(s) + def clean_yaml(self): + """ + """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) - # Validate yaml - # If the yaml is not valid, the schema validation will raise errors and exit - if validate(instance=file,schema=schema) is None: - print("YAML VALID") + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] -###### MAIN ##### + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(full_yaml,f,sort_keys=False) + + print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") + return self.combined + +########################################################################################### def _consolidate_yamls(yamlfile,experiment, platform,target): - """ - Process to combine and validate the yamls - """ # Regsiter tag handler yaml.add_constructor('!join', join_constructor) @@ -218,23 +326,26 @@ def _consolidate_yamls(yamlfile,experiment, platform,target): print("Combining yaml files: ") - # Merge model into combined file - combine_model(yamlfile,combined,experiment,platform,target) + # Define yaml object + comb = init_compile_yaml(yamlfile,experiment, platform,target) - # Experiment check - (py_path,cy_path,ey_path,ay_path) = experiment_check(mainyaml_dir,combined,experiment) + # Merge model into combined file + comb.combine_model() # Merge compile.yaml into combined file - combine_compile(combined,cy_path) + comb.combine_compile() # Merge platforms.yaml into combined file - combine_platforms(combined,py_path) + comb.combine_platforms() + + # Define yaml object + comb = init_pp_yaml(yamlfile,experiment,platform,target) # Merge pp experiment yamls into combined file - combine_experiments(combined,ey_path) + comb.combine_experiment() # Merge pp analysis yamls, if defined, into combined file - combine_analysis(combined,ay_path) + comb.combine_analysis() # Load the fully combined yaml full_yaml = yaml_load(combined) @@ -250,9 +361,6 @@ def _consolidate_yamls(yamlfile,experiment, platform,target): yaml.safe_dump(full_yaml,f,sort_keys=False) print(f"Combined yaml located here: {os.path.dirname(combined)}/{combined}") -## TO-DO: fix schema for validation -# # validate yaml -# validate_yaml(full.yaml) @click.command() def consolidate_yamls(yamlfile,experiment, platform,target): From c03a35b7fd7aea54bb5182a60a1e19ed1d41a881 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 12:42:11 -0400 Subject: [PATCH 25/61] #141 Clean `createDocker.py` --- 
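At this point in the series the yaml consolidation is driven through the two helper classes above rather than the old free functions. A minimal sketch of the compile-side flow, assuming the module is importable as `fre.yamltools.combine_yamls` (the import path adopted later in this series) and that `am5.yaml`, `compile.yaml`, and `platforms.yaml` are reachable from the working directory; the experiment, platform, and target values are placeholders:

```python
# Sketch only: drives the init_compile_yaml class introduced in the preceding patch.
# The constructor still takes an experiment name here; -e/--experiment is removed
# from the compile tools by a later patch in this series.
import fre.yamltools.combine_yamls as cy  # assumed import path

comb = cy.init_compile_yaml("am5.yaml", "c96L65_am5f7b12r1_amip",
                            "ncrc5.intel23", "prod-openmp")
comb.combine_model()          # writes combined-<experiment>.yaml with name/platform/target anchors
comb.combine_compile()        # appends the experiment's compile.yaml
comb.combine_platforms()      # appends platforms.yaml
combined = comb.clean_yaml()  # drops fre_properties/shared/experiments, returns the combined file name
```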
fre/make/createDocker.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index f359dce3..b670c4a4 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -66,18 +66,6 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName -# freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], -# libs = fremakeYaml["container_addlibs"], -# srcDir = srcDir, -# bldDir = bldDir, -# mkTemplatePath = mkTemplate, -# tmpDir = tmpDir) -# -# # Loop through components and send the component name and requires for the Makefile -# for c in fremakeYaml['src']: -# freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) -# freMakefile.writeMakefile() - dockerBuild = buildDocker.container(base = image, exp = fremakeYaml["experiment"], libs = fremakeYaml["container_addlibs"], From ba2fa1a33dae264ab0d5f5c7d411acbb19a9b741 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 13:04:21 -0400 Subject: [PATCH 26/61] #141 Update outfile for `configire_script_yaml` --- fre/pp/configure_script_yaml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 37ac0eeb..f8874bd0 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -197,7 +197,7 @@ def _yamlInfo(yamlfile,experiment,platform,target): print("Writing output files...") cylc_dir = os.path.join(os.path.expanduser("~/cylc-src"), f"{e}__{p}__{t}") outfile = os.path.join(cylc_dir, f"{e}.yaml") - shutil.copyfile(yml, outfile) + shutil.copyfile(full_combined, outfile) print(" " + outfile) dumper = metomi.rose.config.ConfigDumper() From 5ddfefdef1527e6f37b4f17300c8687b439b4cb0 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 13:06:39 -0400 Subject: [PATCH 27/61] #141 Fix `configure_script_yaml` test --- .../combined-c96L65_am5f7b12r1_amip.yaml | 230 ------------------ fre/pp/tests/test_configure_script_yaml.py | 6 +- 2 files changed, 3 insertions(+), 233 deletions(-) delete mode 100644 fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml diff --git a/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml b/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml deleted file mode 100644 index ec011cb2..00000000 --- a/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml +++ /dev/null @@ -1,230 +0,0 @@ -name: c96L65_am5f7b12r1_amip -platform: gfdl.ncrc5-intel22-classic -target: prod-openmp -compile: - experiment: am5 - container_addlibs: null - baremetal_linkerflags: null - src: - - component: FMS - repo: https://github.com/NOAA-GFDL/FMS.git - cppdefs: -DHAVE_GETTID -Duse_libMPI -Duse_netCDF - branch: '2022.01' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: am5_phys - requires: - - FMS - repo: https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git - branch: '2022.01' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: GFDL_atmos_cubed_sphere - requires: - - FMS - - am5_phys - repo: https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git - cppdefs: -DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML - branch: '2022.01' - paths: - - GFDL_atmos_cubed_sphere/driver/GFDL - - GFDL_atmos_cubed_sphere/model - - GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90 - - GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90 - - 
GFDL_atmos_cubed_sphere/tools - - GFDL_atmos_cubed_sphere/GFDL_tools - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: atmos_drivers - requires: - - FMS - - am5_phys - - GFDL_atmos_cubed_sphere - repo: https://github.com/NOAA-GFDL/atmos_drivers.git - cppdefs: -DSPMD -DCLIMATE_NUDGE - branch: '2022.01' - paths: - - atmos_drivers/coupled - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: ice_sis - requires: - - FMS - - ice_param - - mom6 - repo: https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git - branch: '2021.02' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: ice_param - repo: https://github.com/NOAA-GFDL/ice_param.git - cppdefs: -Duse_yaml -Duse_libMPI -Duse_netCDF - branch: '2021.02' - requires: - - FMS - - mom6 - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: land_lad2 - requires: - - FMS - repo: https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git - branch: land_lad2_2021.02 - doF90Cpp: true - cppdefs: -DINTERNAL_FILE_NML - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: mom6 - requires: - - FMS - paths: - - mom6/MOM6-examples/src/MOM6/config_src/dynamic - - mom6/MOM6-examples/src/MOM6/config_src/coupled_driver - - mom6/MOM6-examples/src/MOM6/src/*/ - - mom6/MOM6-examples/src/MOM6/src/*/*/ - - mom6/ocean_BGC/generic_tracers - - mom6/ocean_BGC/mocsy/src - branch: - - '2021.02' - - dev/gfdl/2018.04.06 - repo: - - https://github.com/NOAA-GFDL/ocean_BGC.git - - https://github.com/NOAA-GFDL/MOM6-examples.git - makeOverrides: OPENMP="" - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: FMScoupler - paths: - - FMScoupler/full - - FMScoupler/shared - repo: https://github.com/NOAA-GFDL/FMScoupler.git - branch: '2022.01' - requires: - - FMS - - atmos_drivers - - am5_phys - - land_lad2 - - ice_sis - - ice_param - - mom6 - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include -platforms: -- name: ncrc5.intel - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2022.2.1 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: ${HOME}/fremake_canopy/test -- name: ncrc5.intel23 - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2023.1.0 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: /ncrc/home1/Dana.Singh/fre/fre-cli/fre/make/tests/AM5_example/combine_yamls/test -- name: hpcme.2023 - compiler: intel - RUNenv: - - . 
/spack/share/spack/setup-env.sh - - spack load libyaml - - spack load netcdf-fortran@4.5.4 - - spack load hdf5@1.14.0 - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: true - containerBuild: podman - containerRun: apptainer -directories: - history_dir: /archive/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip/gfdl.ncrc5-intel22-classic-prod-openmp/history - pp_dir: /archive/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip/gfdl.ncrc5-intel22-classic-prod-openmp/pp - analysis_dir: /nbhome/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip - ptmp_dir: /ptmp/$USER - fre_analysis_home: /home/fms/local/opt/fre-analysis/test - pp_grid_spec: /archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar -postprocess: - settings: - history_segment: P1Y - site: ppan - pp_start: 19800101T0000Z - pp_stop: 20200101T0000Z - pp_chunk_a: P1Y - pp_components: atmos atmos_scalar - switches: - do_statics: false - do_timeavgs: true - clean_work: true - do_refinediag: false - do_atmos_plevel_masking: true - do_preanalysis: false - do_analysis: true - components: - - type: atmos_cmip - sources: atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip - sourceGrid: cubedsphere - xyInterp: 180,360 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos - sources: atmos_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_level_cmip - sources: atmos_level_cmip - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_level - sources: atmos_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_month_aer - sources: atmos_month_aer - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: atmos - - type: atmos_diurnal - sources: atmos_diurnal - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_scalar - sources: atmos_scalar - - type: aerosol_cmip - xyInterp: 180,288 - sources: aerosol_month_cmip - sourceGrid: cubedsphere - interpMethod: conserve_order1 - inputRealm: atmos - - type: land - sources: land_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: land - - type: land_cmip - sources: land_month_cmip - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: land - - type: tracer_level - sources: atmos_tracer - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: atmos diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 70a1e74f..2c6790bd 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -10,7 +10,7 @@ # Set example yaml paths, input directory CWD = Path.cwd() test_dir = Path("fre/pp/tests") -test_yaml = Path(f"AM5_example/combined-{experiment}.yaml") +test_yaml = Path(f"AM5_example/am5.yaml")#combined-{experiment}.yaml") # Set home for ~/cylc-src location in script os.environ["HOME"]=str(Path(f"{CWD}/{test_dir}/configure_yaml_out")) @@ -32,10 +32,10 @@ def test_configure_script(): Path(out_dir).mkdir(parents=True,exist_ok=True) # Define combined yaml - comb_yaml = str(Path(f"{CWD}/{test_dir}/{test_yaml}")) + model_yaml = str(Path(f"{CWD}/{test_dir}/{test_yaml}")) # Invoke configure_yaml_script.py - csy._yamlInfo(comb_yaml,experiment,platform,target) + 
csy._yamlInfo(model_yaml,experiment,platform,target) # Check for configuration creation and final combined yaml assert all([Path(f"{out_dir}/{experiment}.yaml").exists(), From 7069840f904d49ab2b9577b0a655258955106cb1 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 15:07:06 -0400 Subject: [PATCH 28/61] #141 Add combine yamls functionality in `runFremake` tool --- fre/make/runFremake.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 1fbe8ff4..8ad0cbd8 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -16,7 +16,6 @@ @click.command() def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_parallel_checkout, verbose): - yml = yamlfile name = experiment nparallel = parallel @@ -42,11 +41,23 @@ def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_paral plist = platform tlist = target + # If fre yamltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): + full_combined = combined_path + print("\nNOTE: Yamls previously merged.") + else: + ## Combine yaml files to parse + comb = cy.init_compile_yaml(yml,experiment,platform,target) + comb_yaml = comb.combine_model() + comb_compile = comb.combine_compile() + comb_platform = comb.combine_platforms() + full_combined = comb.clean_yaml() + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyamlfull_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets From 721395c1b568f143a7b0640dbbb4774ea5590c8f Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 4 Sep 2024 15:19:53 -0400 Subject: [PATCH 29/61] #141 Add parenthesis --- fre/make/runFremake.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 8ad0cbd8..0a71f376 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -57,7 +57,7 @@ def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_paral freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyamlfull_combined,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets From 82c8a3ab99c5bfc7078b3cf06050c2597e6549c5 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 5 Sep 2024 13:51:16 -0400 Subject: [PATCH 30/61] #141 Remove `-e`,`--experiment` from compile tools --- fre/make/createCheckout.py | 8 ++++---- fre/make/createCompile.py | 8 ++++---- fre/make/createDocker.py | 8 ++++---- fre/make/createMakefile.py | 8 ++++---- fre/make/fremake.py | 35 +++++------------------------------ fre/make/runFremake.py | 8 ++++---- 6 files changed, 25 insertions(+), 50 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index c3e16ef4..ad522ee3 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -14,10 +14,10 @@ import yamltools.combine_yamls as cy @click.command() -def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,jobs,execute,verbose): +def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): # Define variables yml = yamlfile - name = experiment + name = yamlfile.split(".")[0] run = execute jobs = str(jobs) 
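With `-e/--experiment` removed above, the experiment label used to name the combined yaml now comes from the model yaml's basename; a quick illustration of the convention (the file name is a placeholder, not part of the patch):

```python
# Illustration only: the naming convention introduced by name = yamlfile.split(".")[0]
yamlfile = "am5.yaml"
name = yamlfile.split(".")[0]        # -> "am5"
combined = f"combined-{name}.yaml"   # -> "combined-am5.yaml", matching the updated yamltools test later in the series
```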
pcheck = no_parallel_checkout @@ -51,8 +51,8 @@ def checkout_create(yamlfile,experiment,platform,target,no_parallel_checkout,job print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse - comb = cy.init_compile_yaml(yml,experiment,platform,target) - comb_yaml = comb.combine_model() + comb = cy.init_compile_yaml(yml,platform,target) + comb_model = comb.combine_model() comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index a4b32b49..8dc3905e 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -14,10 +14,10 @@ import yamltools.combine_yamls as cy @click.command() -def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,verbose): +def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): # Define variables yml = yamlfile - name = experiment + name = yamlfile.split(".")[0] nparallel = parallel jobs = str(jobs) run = execute @@ -45,8 +45,8 @@ def compile_create(yamlfile,experiment,platform,target,jobs,parallel,execute,ver print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse - comb = cy.init_compile_yaml(yml,experiment,platform,target) - comb_yaml = comb.combine_model() + comb = cy.init_compile_yaml(yml,platform,target) + comb_model = comb.combine_model() comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index b670c4a4..a775dc9b 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -12,7 +12,7 @@ import yamltools.combine_yamls as cy @click.command() -def dockerfile_create(yamlfile, experiment, platform, target, execute): +def dockerfile_create(yamlfile,platform,target,execute): srcDir="src" checkoutScriptName = "checkout.sh" baremetalRun = False # This is needed if there are no bare metal runs @@ -20,7 +20,7 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): plist = platform tlist = target yml = yamlfile - name = experiment + name = yamlfile.split(".")[0] run = execute ## If combined yaml does not exist, combine model, compile, and platform yamls @@ -33,8 +33,8 @@ def dockerfile_create(yamlfile, experiment, platform, target, execute): print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse - comb = cy.init_compile_yaml(yml,experiment,platform,target) - comb_yaml = comb.combine_model() + comb = cy.init_compile_yaml(yml,platform,target) + comb_model = comb.combine_model() comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 1ca3cc10..ce95bc6d 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -12,7 +12,7 @@ import yamltools.combine_yamls as cy @click.command() -def makefile_create(yamlfile,experiment,platform,target): +def makefile_create(yamlfile,platform,target): srcDir="src" checkoutScriptName = "checkout.sh" baremetalRun = False # This is needed if there are no bare metal runs @@ -20,7 +20,7 @@ def makefile_create(yamlfile,experiment,platform,target): plist = platform tlist = target yml = yamlfile - name = experiment + name = yamlfile.split(".")[0] ## If combined yaml does not exist, combine model, compile, and platform yamls cd = Path.cwd() @@ -32,8 +32,8 @@ def 
makefile_create(yamlfile,experiment,platform,target): print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse - comb = cy.init_compile_yaml(yml,experiment,platform,target) - comb_yaml = comb.combine_model() + comb = cy.init_compile_yaml(yml,platform,target) + comb_model = comb.combine_model() comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() diff --git a/fre/make/fremake.py b/fre/make/fremake.py index ad28b58a..54946349 100644 --- a/fre/make/fremake.py +++ b/fre/make/fremake.py @@ -40,11 +40,6 @@ def make_cli(): type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements -@click.option("-e", - "--experiment", - type = str, - help = experiment_opt_help, - required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -76,7 +71,7 @@ def make_cli(): is_flag = True, help = verbose_opt_help) @click.pass_context -def run_fremake(context, experiment, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): """ - Perform all fremake functions to run checkout and compile model""" context.forward(fremake_run) @@ -87,11 +82,6 @@ def run_fremake(context, experiment, yamlfile, platform, target, parallel, jobs, type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements -@click.option("-e", - "--experiment", - type = str, - help = experiment_opt_help, - required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -122,7 +112,7 @@ def run_fremake(context, experiment, yamlfile, platform, target, parallel, jobs, is_flag = True, help = verbose_opt_help) @click.pass_context -def create_checkout(context,yamlfile,experiment,platform,target,no_parallel_checkout,jobs,execute,verbose): +def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): """ - Write the checkout script """ context.forward(checkout_create) @@ -133,11 +123,6 @@ def create_checkout(context,yamlfile,experiment,platform,target,no_parallel_chec type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements -@click.option("-e", - "--experiment", - type = str, - help = experiment_opt_help, - required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -149,7 +134,7 @@ def create_checkout(context,yamlfile,experiment,platform,target,no_parallel_chec help = target_opt_help, required = True) @click.pass_context -def create_makefile(context,experiment,yamlfile,platform,target): +def create_makefile(context,yamlfile,platform,target): """ - Write the makefile """ context.forward(makefile_create) @@ -161,11 +146,6 @@ def create_makefile(context,experiment,yamlfile,platform,target): type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements -@click.option("-e", - "--experiment", - type = str, - help = experiment_opt_help, - required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -196,7 +176,7 @@ def create_makefile(context,experiment,yamlfile,platform,target): is_flag = True, help = verbose_opt_help) @click.pass_context -def 
create_compile(context,experiment,yamlfile,platform,target,jobs,parallel,execute,verbose): +def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbose): """ - Write the compile script """ context.forward(compile_create) @@ -206,11 +186,6 @@ def create_compile(context,experiment,yamlfile,platform,target,jobs,parallel,exe type = str, help = yamlfile_opt_help, required = True) # use click.option() over click.argument(), we want help statements -@click.option("-e", - "--experiment", - type = str, - help = experiment_opt_help, - required = True) @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() @@ -225,7 +200,7 @@ def create_compile(context,experiment,yamlfile,platform,target,jobs,parallel,exe is_flag = True, help = "Build Dockerfile that has been generated by create-docker.") @click.pass_context -def create_dockerfile(context,experiment,yamlfile,platform,target,execute): +def create_dockerfile(context,yamlfile,platform,target,execute): """ - Write the dockerfile """ context.forward(dockerfile_create) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 0a71f376..fe024037 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -15,9 +15,9 @@ @click.command() -def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): yml = yamlfile - name = experiment + name = yamlfile.split(".")[0] nparallel = parallel jobs = str(jobs) pcheck = no_parallel_checkout @@ -47,8 +47,8 @@ def fremake_run(yamlfile, experiment, platform, target, parallel, jobs, no_paral print("\nNOTE: Yamls previously merged.") else: ## Combine yaml files to parse - comb = cy.init_compile_yaml(yml,experiment,platform,target) - comb_yaml = comb.combine_model() + comb = cy.init_compile_yaml(yml,platform,target) + comb_model = comb.combine_model() comb_compile = comb.combine_compile() comb_platform = comb.combine_platforms() full_combined = comb.clean_yaml() From 62362d3cf4fae301d3c0f92933e171069e5f970b Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 5 Sep 2024 13:52:12 -0400 Subject: [PATCH 31/61] #141 Make `build` a separate section --- fre/make/tests/AM5_example/am5.yaml | 32 ++++++++++++++--------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml index 20dee565..05d5b9e3 100644 --- a/fre/make/tests/AM5_example/am5.yaml +++ b/fre/make/tests/AM5_example/am5.yaml @@ -38,12 +38,12 @@ fre_properties: - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -shared: +build: # compile information - compile: - compileYaml: &compile_yaml "compile.yaml" - platformYaml: "yaml_include/platforms.yaml" - + compileYaml: "compile.yaml" + platformYaml: "yaml_include/platforms.yaml" + +shared: # directories shared across tools directories: &shared_directories history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] @@ -70,46 +70,46 @@ experiments: - name: "c96L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml +# compile: 
*compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c384L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c384_amip.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c384_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c96_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: *compile_yaml +# compile: *compile_yaml - name: "c96L65_am5f7b12r1_amip_cosp" pp: - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml +# compile: *compile_yaml From 5257265695f1fe117ce9ce4a2992be30a172d495 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 5 Sep 2024 13:54:42 -0400 Subject: [PATCH 32/61] #141 Combine compile and platform yamls without `experiment` - also added `--use` click option to yamltools if user wants to use `combine-yamls` tool - this made is easy to create a combined compile and combined pp yaml when needed(without any `undefined alias`, etc issues) --- fre/yamltools/combine_yamls.py | 172 ++++++++++++--------------------- 1 file changed, 61 insertions(+), 111 deletions(-) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index a4047ed5..1520f3bb 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -8,7 +8,6 @@ ## TO-DO: # - figure out way to safe_load (yaml_loader=yaml.SafeLoader?) 
# - condition where there are multiple pp and analysis yamls -# - fix schema for validation import os import json @@ -34,25 +33,28 @@ def yaml_load(yamlfile): return y -#def combine_model(modelyaml,combined):#,experiment,platform,target): -# """ -# Create the combined.yaml and merge it with the model yaml -# Arguments: -# modelyaml : model yaml file -# combined : final combined file name -# experiment : experiment name -# platform : platform used -# target : targets used -# """ -# # copy model yaml info into combined yaml -# with open(combined,'w+',encoding='UTF-8') as f1: -# f1.write(f'name: &name "{experiment}"\n') -# f1.write(f'platform: &platform "{platform}"\n') -# f1.write(f'target: &target "{target}"\n\n') -# with open(modelyaml,'r',encoding='UTF-8') as f2: -# f1.write("### MODEL YAML SETTINGS ###\n") -# shutil.copyfileobj(f2,f1) -# print(f" model yaml: {modelyaml}") +def get_compile_paths(mainyaml_dir,comb): + """ + Extract compile and platform paths from model yaml + """ + comb_model=yaml_load(comb) + + # set platform yaml filepath + if comb_model["build"]["platformYaml"] is not None: + py=comb_model["build"]["platformYaml"] + py_path=Path(os.path.join(mainyaml_dir,py)) + else: + py_path=None + + # set compile yaml filepath + if comb_model["build"]["compileYaml"] is not None: + cy=comb_model["build"]["compileYaml"] + cy_path=Path(os.path.join(mainyaml_dir,cy)) + else: + cy_path=None + + return (py_path,cy_path) + def experiment_check(mainyaml_dir,comb,experiment): """ @@ -73,26 +75,13 @@ def experiment_check(mainyaml_dir,comb,experiment): if experiment not in exp_list: raise NameError(f"{experiment} is not in the list of experiments") - # set platform yaml filepath - if comb_model["shared"]["compile"]["platformYaml"] is not None: - py=comb_model["shared"]["compile"]["platformYaml"] - py_path=Path(os.path.join(mainyaml_dir,py)) - else: - py_path=None - # Extract compile yaml path for exp. 
provided # if experiment matches name in list of experiments in yaml, extract file path for i in comb_model.get("experiments"): if experiment == i.get("name"): - compileyaml=i.get("compile") expyaml=i.get("pp") analysisyaml=i.get("analysis") - if compileyaml is not None: - cy_path=Path(os.path.join(mainyaml_dir,compileyaml)) - else: - cy_path=None - if expyaml is not None: ey_path=[] for e in expyaml: @@ -109,16 +98,15 @@ def experiment_check(mainyaml_dir,comb,experiment): else: ay_path=None - return (py_path,cy_path,ey_path,ay_path) + return (ey_path,ay_path) -###### MAIN ##### class init_compile_yaml(): - def __init__(self,yamlfile,experiment,platform,target): + def __init__(self,yamlfile,platform,target): """ Process to combine yamls appllicable to compilation """ self.yml = yamlfile - self.name = experiment + self.name = yamlfile.split(".")[0] self.platform = platform self.target = target @@ -133,16 +121,9 @@ def __init__(self,yamlfile,experiment,platform,target): print("Combining yaml files: ") - def combine_model(self): """ Create the combined.yaml and merge it with the model yaml - Arguments: - modelyaml : model yaml file - combined : final combined file name - experiment : experiment name - platform : platform used - target : targets used """ # copy model yaml info into combined yaml with open(self.combined,'w+',encoding='UTF-8') as f1: @@ -158,12 +139,9 @@ def combine_model(self): def combine_compile(self): """ Combine compile yaml with the defined combined.yaml - Arguments: - comb_m : combined model yaml file - compileyaml : compile yaml file """ - # Experiment Check - (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) # copy compile yaml info into combined yaml if cy_path is not None: @@ -176,12 +154,9 @@ def combine_compile(self): def combine_platforms(self): """ Combine platforms yaml with the defined combined.yaml - Arguments: - comb_mc : combined model and compile yaml file - platformsyaml : platforms yaml file """ - # Experiment Check - (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) # combine platform yaml if py_path is not None: @@ -193,6 +168,8 @@ def combine_platforms(self): def clean_yaml(self): """ + Clean the yaml; remove unnecessary sections in + final combined yaml. 
""" # Load the fully combined yaml full_yaml = yaml_load(self.combined) @@ -234,12 +211,6 @@ def __init__(self,yamlfile,experiment,platform,target): def combine_model(self): """ Create the combined.yaml and merge it with the model yaml - Arguments: - modelyaml : model yaml file - combined : final combined file name - experiment : experiment name - platform : platform used - target : targets used """ # copy model yaml info into combined yaml with open(self.combined,'w+',encoding='UTF-8') as f1: @@ -255,12 +226,9 @@ def combine_model(self): def combine_experiment(self): """ Combine experiment yamls with the defined combined.yaml - Arguments: - comb_mcp : combined model, compile, and platforms yaml file - expyaml : experiment yaml files """ # Experiment Check - (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) ## COMBINE EXPERIMENT YAML INFO if ey_path is not None: @@ -276,12 +244,9 @@ def combine_experiment(self): def combine_analysis(self): """ Combine analysis yamls with the defined combined.yaml - Arguments: - comb_mcpe : combined model, compile, platforms, and experiment yaml file - analysisyaml : analysis yaml file """ # Experiment Check - (py_path,cy_path,ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) ## COMBINE EXPERIMENT YAML INFO if ay_path is not None: @@ -296,6 +261,8 @@ def combine_analysis(self): def clean_yaml(self): """ + Clean the yaml; remove unnecessary sections in + final combined yaml. """ # Load the fully combined yaml full_yaml = yaml_load(self.combined) @@ -314,61 +281,44 @@ def clean_yaml(self): return self.combined ########################################################################################### -def _consolidate_yamls(yamlfile,experiment, platform,target): +def _consolidate_yamls(yamlfile,experiment,platform,target,use): # Regsiter tag handler yaml.add_constructor('!join', join_constructor) # Path to the main model yaml mainyaml_dir = os.path.dirname(yamlfile) - # Name of the combined yaml - combined=f"combined-{experiment}.yaml" - - print("Combining yaml files: ") - - # Define yaml object - comb = init_compile_yaml(yamlfile,experiment, platform,target) - - # Merge model into combined file - comb.combine_model() - - # Merge compile.yaml into combined file - comb.combine_compile() - - # Merge platforms.yaml into combined file - comb.combine_platforms() - - # Define yaml object - comb = init_pp_yaml(yamlfile,experiment,platform,target) - - # Merge pp experiment yamls into combined file - comb.combine_experiment() - - # Merge pp analysis yamls, if defined, into combined file - comb.combine_analysis() - - # Load the fully combined yaml - full_yaml = yaml_load(combined) - - # Clean the yaml - # If keys exists, delete: - keys_clean=["fre_properties", "shared", "experiments"] - for kc in keys_clean: - if kc in full_yaml.keys(): - del full_yaml[kc] - - with open(combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(full_yaml,f,sort_keys=False) - - print(f"Combined yaml located here: {os.path.dirname(combined)}/{combined}") + if use == "compile": + # Define yaml object + comb = init_compile_yaml(yamlfile,platform,target) + # Merge model into combined file + comb.combine_model() + # Merge compile.yaml into combined file + comb.combine_compile() + # Merge platforms.yaml into combined file + comb.combine_platforms() + # Clean the 
yaml + comb.clean_yaml() + + if use =="pp": + # Define yaml object + comb = init_pp_yaml(yamlfile,experiment,platform,target) + # Merge model into combined file + comb.combine_model() + # Merge pp experiment yamls into combined file + comb.combine_experiment() + # Merge pp analysis yamls, if defined, into combined file + comb.combine_analysis() + # Clean the yaml + comb.clean_yaml() @click.command() -def consolidate_yamls(yamlfile,experiment, platform,target): +def consolidate_yamls(yamlfile,experiment,platform,target,use): ''' Wrapper script for calling yaml_combine - allows the decorated version of the function to be separate from the undecorated version ''' - return _consolidate_yamls(yamlfile,experiment, platform,target) + return _consolidate_yamls(yamlfile,experiment,platform,target,use) # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': From 6923b63f6783ccb939831715d48cd5cdcb84aa07 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 5 Sep 2024 13:56:47 -0400 Subject: [PATCH 33/61] #141 Create `--use` functionality - user can specify if combining tool is being used for compile or postprocessing - makes separate full combined yamls - this was because `name`, `platform, and `target` vary if compiling vs postprocessing --- fre/yamltools/freyamltools.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py index d190b243..70ff149a 100644 --- a/fre/yamltools/freyamltools.py +++ b/fre/yamltools/freyamltools.py @@ -25,8 +25,7 @@ def function(context, uppercase): @click.option("-e", "--experiment", type=str, - help="Experiment name", - required=True) + help="Experiment name") @click.option("-p", "--platform", type=str, @@ -37,8 +36,12 @@ def function(context, uppercase): type=str, help="Target name", required=True) +@click.option("--use", + type=str, + help="Process user is combining yamls for. 
Can pass 'compile' or 'pp'", + required=True) @click.pass_context -def combine_yamls(context,yamlfile,experiment,platform,target): +def combine_yamls(context,yamlfile,experiment,platform,target,use): """ - Combine the model yaml with the compile, platform, experiment, and analysis yamls From ff90e16064add36ce81667a096f6967c0c0a16eb Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 5 Sep 2024 14:23:11 -0400 Subject: [PATCH 34/61] #141 Update `fre yamltools combine-yamls` test - to reflect changes in model yaml for compilation --- fre/yamltools/tests/AM5_example/am5.yaml | 21 ++++------------- fre/yamltools/tests/test_combine_yamls.py | 28 +++++++++++++++++++++-- 2 files changed, 31 insertions(+), 18 deletions(-) diff --git a/fre/yamltools/tests/AM5_example/am5.yaml b/fre/yamltools/tests/AM5_example/am5.yaml index 05391f3b..07e22a07 100644 --- a/fre/yamltools/tests/AM5_example/am5.yaml +++ b/fre/yamltools/tests/AM5_example/am5.yaml @@ -38,12 +38,12 @@ fre_properties: - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -shared: +build: # compile information - compile: - compileYaml: &compile_yaml "compile.yaml" - platformYaml: "yaml_include/platforms.yaml" - + compileYaml: "compile.yaml" + platformYaml: "yaml_include/platforms.yaml" + +shared: # directories shared across tools directories: &shared_directories history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] @@ -70,46 +70,35 @@ experiments: - name: "c96L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - name: "c384L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c384_amip.yaml" - compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c384_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: *compile_yaml - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c96_amip.yaml" - "yaml_include/pp.om4.yaml" - compile: *compile_yaml - name: "c96L65_am5f7b12r1_amip_cosp" pp: - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py index 79d58a67..0f8e2fbd 100644 --- a/fre/yamltools/tests/test_combine_yamls.py +++ b/fre/yamltools/tests/test_combine_yamls.py @@ -56,7 +56,29 @@ def test_analysisyaml_exists(): """ assert Path(f"{in_dir}/yaml_include/analysis.yaml").exists() -def test_merged_yamls(): +def test_merged_compile_yamls(): + """ + Check for the creation of the combined-[experiment] yaml + Check that the model yaml was merged into the combined yaml + """ + # Go into the input directory + os.chdir(in_dir) + + # Model yaml path + modelyaml = "am5.yaml" + + USE_COMPILE = "compile" + + # Merge the yamls + cy._consolidate_yamls(modelyaml, EXPERIMENT, PLATFORM, TARGET, USE_COMPILE) + + # Move 
combined yaml to output location + shutil.move(f"combined-am5.yaml", out_dir) + + # Check that the combined yaml exists + assert Path(f"{out_dir}/combined-am5.yaml").exists() + +def test_merged_pp_yamls(): """ Check for the creation of the combined-[experiment] yaml Check that the model yaml was merged into the combined yaml @@ -64,8 +86,10 @@ def test_merged_yamls(): # Model yaml path modelyaml = Path(f"{in_dir}/am5.yaml") + USE_PP = "pp" + # Merge the yamls - cy._consolidate_yamls(modelyaml,EXPERIMENT, PLATFORM, TARGET) + cy._consolidate_yamls(modelyaml, EXPERIMENT, PLATFORM, TARGET, USE_PP) # Move combined yaml to output location shutil.move(f"combined-{EXPERIMENT}.yaml", out_dir) From b4d456ab28d8f3fcb0b0839492fd994552b9128f Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 9 Sep 2024 12:40:29 -0400 Subject: [PATCH 35/61] #141 Symlink runscript to more general location in container - for easy reference in frerun (model container integration) --- fre/make/gfdlfremake/buildDocker.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 4eb268b7..37b25fa1 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -58,11 +58,11 @@ def writeDockerfileCheckout(self, cScriptName, cOnDisk): - cScriptName : The name of the checkout script in the container - cOnDisk : The relative path to the checkout script on disk """ - self.checkoutPath = "/apps/"+self.e+"/src/"+ cScriptName + self.checkoutPath = self.src+"/"+ cScriptName self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") - self.d.write("RUN chmod 744 /apps/"+self.e+"/src/checkout.sh \n") + self.d.write("RUN chmod 744 "+self.src+"/checkout.sh \n") self.d.writelines(self.setup) - self.d.write(" && /apps/"+self.e+"/src/checkout.sh \n") + self.d.write(" && "+self.src+"/checkout.sh \n") # Clone mkmf self.d.writelines(self.mkmfclone) @@ -170,6 +170,9 @@ def writeRunscript(self,RUNenv,containerRun,runOnDisk): self.d.write("COPY "+runOnDisk+" "+self.bld+"/execrunscript.sh\n") #make runscript executable self.d.write("RUN chmod 744 "+self.bld+"/execrunscript.sh\n") + #link runscript to more general location (for frerun container usage) + self.d.write("RUN mkdir -p /apps/bin \ \n") + self.d.write(" && ln -sf "+self.bld+"/execrunscript.sh "+"/apps/bin/execrunscript.sh") #finish the dockerfile self.d.writelines(self.setup) self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") From 644e64dcab6b940b4178f39f96356296fc536c8b Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 12:08:06 -0400 Subject: [PATCH 36/61] #141 Remove README for now (will update) - dont want to push wrong or outdated info - will work on updating readme in another issue --- fre/make/README.md | 181 --------------------------------------------- 1 file changed, 181 deletions(-) delete mode 100644 fre/make/README.md diff --git a/fre/make/README.md b/fre/make/README.md deleted file mode 100644 index 6182e677..00000000 --- a/fre/make/README.md +++ /dev/null @@ -1,181 +0,0 @@ -# **Fremake Canopy** -Through the fre-cli, `fre make` can be used to create and run a checkout script, makefile, and compile a model. 
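The usage notes removed in this patch predate the `--use` option added earlier in the series; a minimal sketch of the consolidation calls they now map onto, assuming the `fre.yamltools.combine_yamls` import path and placeholder experiment, platform, and target values:

```python
# Sketch only: consolidation flow after the --use option; all argument values are placeholders.
import fre.yamltools.combine_yamls as cy

# Compile-side combined yaml; the experiment argument is unused for this path.
cy._consolidate_yamls("am5.yaml", None, "ncrc5.intel23", "prod-openmp", use="compile")

# Post-processing combined yaml for one experiment listed in the model yaml.
cy._consolidate_yamls("am5.yaml", "c96L65_am5f7b12r1_amip",
                      "gfdl.ncrc5-intel22-classic", "prod-openmp", use="pp")
```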
- -* Fremake Canopy Supports: - - multiple targets, would have to use one `-t` flag for each one - - bare-metal build - - container creation - - parallel checkouts for bare-metal build** - -* **Note: Users will not be able to create containers without access to podman** - -The fremake canopy fre-cli subcommands are described below ([Subcommands](#subcommands)), as well as a Guide on the order in which to use them ([Guide](#guide)). - -Additionally, as mentioned, multiple targets can be used more multiple target-platform combinations. Below is an example of this usage for both the bare-metal build and container build, using the AM5 model - -- [Bare-metal Example](#bare-metal-build-multi-target-example) -- [Container Example](#container-build-multi-target-example) - -## **Usage (Users)** -* Refer to fre-cli [README.md](https://github.com/NOAA-GFDL/fre-cli/blob/main/README.md) for foundational fre-cli usage guide and tips. -* Fremake package repository located at: https://gitlab.gfdl.noaa.gov/portable_climate/fremake_canopy/-/tree/main - - -## Subcommands -- `fre make create-checkout [options]` - - Purpose: Creates the checkout script and can check out source code (with execute option) - - Options: - - `-y, --yamlfile [experiment yaml] (required)` - - `-p, --platform [platform] (required)` - - `-t, --target [target] (required)` - - `-j, --jobs [number of jobs to run simultneously]` - - `-npc, --no-parallel-checkout (for container build)` - - `-e, --execute` - -- `fre make create-makefile [options]` - - Purpose: Creates the makefile - - Options: - - `-y, --yamlfile [experiment yaml] (required)` - - `-p, --platform [platform] (required)` - - `-t, --target [target] (required)` - -- `fre make create-compile [options]` - - Purpose: Creates the compile script and compiles the model (with execute option) - - Options: - - `-y, --yamlfile [experiment yaml] (required)` - - `-p, --platform [platform] (required)` - - `-t, --target [target] (required)` - - `-j, --jobs [number of jobs to run simultneously]` - - `-n, --parallel [number of concurrent modile compiles]` - - `-e, --execute` - -- `fre make create-dockerfile [options]` - - Purpose: Creates the dockerfile and creates the container (with execute option) - - With the creation of the dockerfile, the Makefile, checkout script, and any other necessary script is copied into the container from a temporary location - - Options: - - `-y, --yamlfile [experiment yaml] (required)` - - `-p, --platform [platform] (required)` - - `-t, --target [target] (required)` - - `-e, --execute` - -- `fre make run-fremake [options]` - - Purpose: Create the checkout script, Makefile, compile script, and dockerfile (platform dependent) for the compilation of the model - - Options: - - `-y, --yamlfile [experiment yaml] (required)` - - `-p, --platform [platform] (required)` - - `-t, --target [target] (required)` - - `-npc, --no-parallel-checkout (for container build)` - - `-j, --jobs [number of jobs to run simultneously]` - - `-n, --parallel [number of concurrent modile compiles]` - -## Guide -In order to use the `fre make` tools, remember to create a combined yaml first. This can be done with the `fre yamltools combine-yamls` tool. This combines the model, compile, platform, experiment, and any analysis yamls into ONE yaml file for parsing and validation. - -To combine: -`fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target]` - -### **Bare-metal Build:** -```bash -## NOTE: Remember to create the combined yaml first! 
-## The targets used in fremake are taken from the fre make command itself -# Create combined yaml -fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target] - -# Create checkout script -fre make create-checkout -y [combined yaml file] -e [experiment name] -p [platform] -t [target] - -# Create and run checkout script -fre make create-checkout -y [combined yaml file] -e [experiment name] -p [platform] -t [target] --execute - -# Create Makefile -fre make create-makefile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] - -# Creat the compile script -fre make create-compile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] - -# Create and run the compile script -fre make create-compile -y [combined yaml file] -e [experiment name] -p [platform] -t [target] --execute - -# Run all of fremake -fre make run-fremake -y [combined yaml] -e [experiment name] -p [platform] -t [target] [other options...] -``` - -### **Bare-metal Build (Multi-target example):** -```bash -## NOTE: Remember to create the combined yaml first! -## The targets used in fremake are taken from the fre make command itself -# Create combined yaml -fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t debug - -# Create checkout script -fre make create-checkout -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug - -# Create and run checkout script -fre make create-checkout -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug --execute - -# Create Makefile -fre make create-makefile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug - -# Creat the compile script -fre make create-compile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug - -# Create and run the compile script -fre make create-compile -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug --execute - -# Run all of fremake -fre make run-fremake -y combined-c96L65_am5f7b12r1_amip.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod-openmp -t debug -``` - -### **Container Build:** -For the container build, parallel checkouts are not supported, so the `-npc` options must be used for the checkout script. In addition the platform must be a container platform. ***To reiterate, users will not be able to create containers unless they have podman access on gaea.*** -```bash -## NOTE: Remember to create the combined yaml first! 
-## The targets used in fremake are taken from the fre make command itself -# Create combined yaml -fre yamltools combine-yamls -y [model yaml] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] - -# Create checkout script -fre make create-checkout -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] -npc - -# Create and run checkout script -fre make create-checkout -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] --execute -npc - -# Create Makefile -fre make create-makefile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] - -# Create the compile script -fre make create-compile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM]-t [target] - -# Create and run the compile script -fre make create-compile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM]-t [target] --execute - -#Create a Dockerfile -fre make create-dockerfile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] - -# Create and run the Dockerfile -fre make create-dockerfile -y [combined yaml file] -e [experiment name] -p [CONTAINER PLATFORM] -t [target] --execute -``` -### **Container Build (Multi-target example):** -```bash -# NOTE: multi-target will be taken from fre make commands -# Create combined yaml -fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p hpcme.2023 -t debug - -# Create checkout script -fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -npc - -# Create and run checkout script -fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -npc -e - -# Create Makefile -fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug - -# Creat the compile script -fre make create-compile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug - -# Create and run the compile script -fre make create-compile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -e - -# Run all of fremake -fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug [other options...] 
-npc -``` From 1256ec54164428492c12caa0f846318421d6720c Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 13:03:05 -0400 Subject: [PATCH 37/61] #141 Create separate function to combine yamls - create separate functions to call for combining compile and pp yamls - Also: fix import for combine_yamls functions, change `SystemExit` to `ValueError` --- fre/make/createCheckout.py | 15 +++------ fre/make/createCompile.py | 18 ++++------- fre/make/createDocker.py | 18 ++++------- fre/make/createMakefile.py | 18 ++++------- fre/make/runFremake.py | 22 ++++++------- fre/pp/configure_script_yaml.py | 12 +++---- fre/yamltools/combine_yamls.py | 56 +++++++++++++++++++++------------ 7 files changed, 73 insertions(+), 86 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index ad522ee3..d264ff15 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -7,11 +7,7 @@ import click from pathlib import Path from .gfdlfremake import varsfre, platformfre, yamlfre, checkout, targetfre - -# Relative import -f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(f) -import yamltools.combine_yamls as cy +import fre.yamltools.combine_yamls as cy @click.command() def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): @@ -45,17 +41,14 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v combined = Path(f"combined-{name}.yaml") combined_path=os.path.join(cd,combined) + # Combine model, compile, and platform yamls # If fre yammltools combine-yamls tools was used, the combined yaml should exist if Path(combined_path).exists(): full_combined = combined_path print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,platform,target) - comb_model = comb.combine_model() - comb_compile = comb.combine_compile() - comb_platform = comb.combine_platforms() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -76,7 +69,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in platforms.yaml") #modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in platforms.yaml") #modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the source directory for the platform diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 8dc3905e..07745bbc 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -7,11 +7,7 @@ from multiprocessing.dummy import Pool import click from .gfdlfremake import varsfre, platformfre, yamlfre, targetfre, buildBaremetal - -# Relative import -f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(f) -import yamltools.combine_yamls as cy +import fre.yamltools.combine_yamls as cy @click.command() def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): @@ -40,16 +36,14 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): combined = Path(f"combined-{name}.yaml") combined_path=os.path.join(cd,combined) - if Path(combined_path).exists: + # 
Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): full_combined = combined_path print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,platform,target) - comb_model = comb.combine_model() - comb_compile = comb.combine_compile() - comb_platform = comb.combine_platforms() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -70,7 +64,7 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index a775dc9b..7170ac97 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -5,11 +5,7 @@ from pathlib import Path import click from .gfdlfremake import varsfre, targetfre, makefilefre, platformfre, yamlfre, buildDocker - -# Relative import -f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(f) -import yamltools.combine_yamls as cy +import fre.yamltools.combine_yamls as cy @click.command() def dockerfile_create(yamlfile,platform,target,execute): @@ -28,16 +24,14 @@ def dockerfile_create(yamlfile,platform,target,execute): combined = Path(f"combined-{name}.yaml") combined_path=os.path.join(cd,combined) - if Path(combined_path).exists: + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): full_combined = combined_path print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,platform,target) - comb_model = comb.combine_model() - comb_compile = comb.combine_compile() - comb_platform = comb.combine_platforms() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -54,7 +48,7 @@ def dockerfile_create(yamlfile,platform,target,execute): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index ce95bc6d..9758c5a5 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -5,11 +5,7 @@ from pathlib import Path import click from .gfdlfremake import makefilefre, varsfre, targetfre, yamlfre - -# Relative import -f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(f) -import yamltools.combine_yamls as cy +import fre.yamltools.combine_yamls as cy @click.command() def 
makefile_create(yamlfile,platform,target): @@ -27,16 +23,14 @@ def makefile_create(yamlfile,platform,target): combined = Path(f"combined-{name}.yaml") combined_path=os.path.join(cd,combined) - if Path(combined_path).exists: + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): full_combined = combined_path print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,platform,target) - comb_model = comb.combine_model() - comb_compile = comb.combine_compile() - comb_platform = comb.combine_platforms() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -53,7 +47,7 @@ def makefile_create(yamlfile,platform,target): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index fe024037..821336c5 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -8,11 +8,9 @@ import os import logging from multiprocessing.dummy import Pool - import click - from .gfdlfremake import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal - +import fre.yamltools.combine_yamls as cy @click.command() def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): @@ -41,17 +39,19 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb plist = platform tlist = target - # If fre yamltools combine-yamls tools was used, the combined yaml should exist + ## If combined yaml does not exist, combine model, compile, and platform yamls + cd = Path.cwd() + combined = Path(f"combined-{name}.yaml") + combined_path=os.path.join(cd,combined) + + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist if Path(combined_path).exists(): full_combined = combined_path print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_compile_yaml(yml,platform,target) - comb_model = comb.combine_model() - comb_compile = comb.combine_compile() - comb_platform = comb.combine_platforms() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -72,7 +72,7 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) @@ -98,7 +98,7 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb if 
modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise ValueError (platformName + " does not exist in " + modelYaml.platformsfile) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Make the source directory based on the modelRoot and platform diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index f8874bd0..2d7d9ac6 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -167,18 +167,14 @@ def _yamlInfo(yamlfile,experiment,platform,target): combined = Path(f"combined-{e}.yaml") combined_path=os.path.join(cd,combined) - # If fre yamltools combine-yamls tools was used, the combined yaml should exist + # Combine model, experiment, and analysis yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist if Path(combined_path).exists(): - ## Make sure that the previously created combined yaml is valid full_combined = combined_path - print("\nNOTE: Yamls merged from combine-yamls tool") + print("\nNOTE: Yamls previously merged.") else: - ## Combine yaml files to parse comb = cy.init_pp_yaml(yml,e,p,t) - comb_model = comb.combine_model() - comb_exp = comb.combine_experiment() - comb_analysis = comb.combine_analysis() - full_combined = comb.clean_yaml() + full_combined = cy.get_combined_compileyaml(comb) # Validate yaml validate_yaml(full_combined) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 1520f3bb..577f9968 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -280,6 +280,40 @@ def clean_yaml(self): print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined +def get_combined_compileyaml(comb): + """ + Combine the model, compile, and platform yamls + Arguments: + - comb : combined yaml object + """ + # Merge model into combined file + comb_model = comb.combine_model() + # Merge compile.yaml into combined file + comb_compile = comb.combine_compile() + # Merge platforms.yaml into combined file + comb_platform = comb.combine_platforms() + # Clean the yaml + full_combined = comb.clean_yaml() + + return full_combined + +def get_combined_ppyaml(comb): + """ + Combine the model, experiment, and analysis yamls + Arguments: + - comb : comine yaml object + """ + # Merge model into combined file + comb_model = comb.combine_model() + # Merge pp experiment yamls into combined file + comb_exp = comb.combine_experiment() + # Merge pp analysis yamls, if defined, into combined file + comb_analysis = comb.combine_analysis() + # Clean the yaml + full_combined = comb.clean_yaml() + + return full_combined + ########################################################################################### def _consolidate_yamls(yamlfile,experiment,platform,target,use): # Regsiter tag handler @@ -289,28 +323,10 @@ def _consolidate_yamls(yamlfile,experiment,platform,target,use): mainyaml_dir = os.path.dirname(yamlfile) if use == "compile": - # Define yaml object - comb = init_compile_yaml(yamlfile,platform,target) - # Merge model into combined file - comb.combine_model() - # Merge compile.yaml into combined file - comb.combine_compile() - # Merge platforms.yaml into combined file - comb.combine_platforms() - # Clean the yaml - comb.clean_yaml() + get_combined_compileyaml(yamlfile,platform,target) if use =="pp": - # 
Define yaml object - comb = init_pp_yaml(yamlfile,experiment,platform,target) - # Merge model into combined file - comb.combine_model() - # Merge pp experiment yamls into combined file - comb.combine_experiment() - # Merge pp analysis yamls, if defined, into combined file - comb.combine_analysis() - # Clean the yaml - comb.clean_yaml() + get_combined_ppyaml(yamlfile,experiment,platform,target) @click.command() def consolidate_yamls(yamlfile,experiment,platform,target,use): From 417c326a17e5c8adf4983ecd67ec55cf529798e5 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 16:14:04 -0400 Subject: [PATCH 38/61] #141 Remove unnecessary comments --- fre/make/createCheckout.py | 4 ++-- fre/make/gfdlfremake/buildBaremetal.py | 1 - fre/make/gfdlfremake/buildDocker.py | 1 - fre/make/gfdlfremake/makefilefre.py | 1 - fre/make/tests/AM5_example/am5.yaml | 11 ----------- fre/pp/tests/test_configure_script_yaml.py | 2 +- 6 files changed, 3 insertions(+), 17 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index d264ff15..e424e693 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -69,7 +69,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v if modelYaml.platforms.hasPlatform(platformName): pass else: - raise ValueError (platformName + " does not exist in platforms.yaml") #modelYaml.combined.get("compile").get("platformYaml")) + raise ValueError (platformName + " does not exist in platforms.yaml") (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the source directory for the platform @@ -83,7 +83,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v freCheckout = checkout.checkout("checkout.sh",srcDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - print("\nCheckout script created in "+ srcDir + "/checkout.sh \n") + click.echo("\nCheckout script created in "+ srcDir + "/checkout.sh \n") # Run the checkout script if run == True: diff --git a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py index 49ef532e..f0279a11 100644 --- a/fre/make/gfdlfremake/buildBaremetal.py +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -6,7 +6,6 @@ import subprocess import os -#from . import targetfre def fremake_parallel(fremakeBuildList): """ diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 37b25fa1..3c93de4d 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -5,7 +5,6 @@ ## \description import os -#from . 
import targetfre class container(): """ diff --git a/fre/make/gfdlfremake/makefilefre.py b/fre/make/gfdlfremake/makefilefre.py index 0a6196d7..0db5c60b 100644 --- a/fre/make/gfdlfremake/makefilefre.py +++ b/fre/make/gfdlfremake/makefilefre.py @@ -1,5 +1,4 @@ import os -#import subprocess import textwrap def linklineBuild(self): diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml index 05d5b9e3..359755ec 100644 --- a/fre/make/tests/AM5_example/am5.yaml +++ b/fre/make/tests/AM5_example/am5.yaml @@ -70,46 +70,35 @@ experiments: - name: "c96L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c96_amip.yaml" -# compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c96_clim.yaml" -# compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c96_clim.yaml" -# compile: *compile_yaml - name: "c96L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c96_clim.yaml" -# compile: *compile_yaml - name: "c384L65_am5f7b12r1_amip" pp: - "yaml_include/pp.c384_amip.yaml" -# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010F" pp: - "yaml_include/pp.c384_clim.yaml" -# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim1850F" pp: - "yaml_include/pp.c384_clim.yaml" -# compile: *compile_yaml - name: "c384L65_am5f7b12r1_pdclim2010AERF" pp: - "yaml_include/pp.c384_clim.yaml" -# compile: *compile_yaml - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c384_amip.yaml" - "yaml_include/pp.om4.yaml" -# compile: *compile_yaml - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" pp: - "yaml_include/pp.c96_amip.yaml" - "yaml_include/pp.om4.yaml" -# compile: *compile_yaml - name: "c96L65_am5f7b12r1_amip_cosp" pp: - "yaml_include/pp.c96_amip.yaml" -# compile: *compile_yaml diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 2c6790bd..ce308e31 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -10,7 +10,7 @@ # Set example yaml paths, input directory CWD = Path.cwd() test_dir = Path("fre/pp/tests") -test_yaml = Path(f"AM5_example/am5.yaml")#combined-{experiment}.yaml") +test_yaml = Path(f"AM5_example/am5.yaml") # Set home for ~/cylc-src location in script os.environ["HOME"]=str(Path(f"{CWD}/{test_dir}/configure_yaml_out")) From 043136f2d27f8ea043cf5837e89a7d813024283c Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 16:33:33 -0400 Subject: [PATCH 39/61] #141 Fix combining in configure_script_yaml --- fre/pp/configure_script_yaml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 2d7d9ac6..280402a5 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -174,7 +174,7 @@ def _yamlInfo(yamlfile,experiment,platform,target): print("\nNOTE: Yamls previously merged.") else: comb = cy.init_pp_yaml(yml,e,p,t) - full_combined = cy.get_combined_compileyaml(comb) + full_combined = cy.get_combined_ppyaml(comb) # Validate yaml validate_yaml(full_combined) From ad44ce27c47658093b218a3970de3cabe46c677c Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 16:59:44 -0400 Subject: [PATCH 40/61] #141 Add comments and change `print` statements to `raise Exception` --- fre/make/gfdlfremake/buildDocker.py | 6 ++++++ fre/make/gfdlfremake/platformfre.py | 18 ++++++------------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git 
a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 3c93de4d..ee93ecd1 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -30,6 +30,8 @@ def __init__(self,base,exp,libs,RUNenv,target): self.mkmf = True self.target = target self.template = "/apps/mkmf/templates/hpcme-intel21.mk" + + # Set up spack loads in RUN commands in dockerfile if RUNenv == "": self.setup = ["RUN \\ \n"] else: @@ -40,9 +42,13 @@ def __init__(self,base,exp,libs,RUNenv,target): if self.l: for l in self.l: self.setup.append(" && spack load "+l+" \\ \n") + + # Clone and copy mkmf through Dockerfile self.mkmfclone=["RUN cd /apps \\ \n", " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", " && cp mkmf/bin/* /usr/local/bin \n"] + + # Set bld_dir, src_dir, mkmf_template self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", " && src_dir="+self.src+" \\ \n", " && mkmf_template="+self.template+ " \\ \n"] diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index 7348f3a7..fe8924f9 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -17,25 +17,21 @@ def __init__(self,platforminfo): try: p["name"] except: - print("At least one of the platforms is missing a name in "+fname+"\n") - raise + raise Exception("At least one of the platforms is missing a name in "+fname+"\n") ## Check the compiler try: p["compiler"] except: - print ("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") - raise + raise Exception("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") ## Check for the Fortran (fc) and C (cc) compilers try: p["fc"] except: - print ("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") - raise + raise Exception("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") try: p["cc"] except: - print ("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") - raise + raise Exception("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") ## Check for modules to load try: p["modules"] @@ -64,8 +60,7 @@ def __init__(self,platforminfo): try: p["containerBuild"] except: - print ("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") - raise + raise Exception("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": raise ValueError("Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") ## Check for container environment set up for RUN commands @@ -77,8 +72,7 @@ def __init__(self,platforminfo): try: p["containerRun"] except: - print ("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n") - raise + raise Exception("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n") if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": raise ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") ## set the location of the mkTemplate. 
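For reference, a minimal sketch (not part of any patch above) of the validation pattern that the platformfre.py change in the preceding patch moves toward: raise an informative exception immediately instead of printing and re-raising. The helper name `check_platform_entry` and the exact messages are illustrative only, assuming a platform entry parsed from platforms.yaml into a plain dict.

```python
def check_platform_entry(p, fname):
    """Illustrative only: validate one platform entry parsed from platforms.yaml."""
    # Required keys for every platform; fail fast with a message naming the file.
    for key in ("name", "compiler", "fc", "cc"):
        if key not in p:
            raise Exception("You must specify '" + key + "' for platform '"
                            + str(p.get("name", "<unnamed>")) + "' in the file " + fname + "\n")
    # Container platforms additionally need a supported build and run program.
    if p.get("container"):
        if p.get("containerBuild") not in ("podman", "docker"):
            raise ValueError("Container builds only supported with docker or podman, "
                             "but you listed " + str(p.get("containerBuild")) + "\n")
        if p.get("containerRun") not in ("apptainer", "singularity"):
            raise ValueError("Container runs only supported with apptainer or singularity, "
                             "but you listed " + str(p.get("containerRun")) + "\n")
```

A caller would simply loop over the parsed `platforms:` list and invoke this once per entry, letting the exception propagate to the CLI.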
From cf5bc42ca90e3bdad57561d365dc686633281b0b Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 10 Sep 2024 17:24:55 -0400 Subject: [PATCH 41/61] #141 Make variable and comments more helpful --- fre/make/gfdlfremake/makefilefre.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/fre/make/gfdlfremake/makefilefre.py b/fre/make/gfdlfremake/makefilefre.py index 0db5c60b..dfe15fff 100644 --- a/fre/make/gfdlfremake/makefilefre.py +++ b/fre/make/gfdlfremake/makefilefre.py @@ -102,8 +102,12 @@ def createLibstring (self,c,r,o): - r The requires for that component - o The overrides for that component """ - d=zip(self.c,self.r,self.o) - return(sorted(d,key=lambda values:len(values[1]),reverse=True)) + # org_comp : returns a zip object + org_comp = zip(self.c,self.r,self.o) + # Sort zip object so that the component with the most requires (self.r) is listed first, and so on + sort = sorted(org_comp,key=lambda values:len(values[1]),reverse=True) + + return sort def writeMakefile (self): """ From b0364881fea444574df52c2100b44a748a264a60 Mon Sep 17 00:00:00 2001 From: Chris Blanton Date: Tue, 10 Sep 2024 18:12:27 -0400 Subject: [PATCH 42/61] #141 Initialize the init_compile_yaml and init_pp_yaml objects before combining --- fre/yamltools/combine_yamls.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 577f9968..859973b7 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -323,10 +323,12 @@ def _consolidate_yamls(yamlfile,experiment,platform,target,use): mainyaml_dir = os.path.dirname(yamlfile) if use == "compile": - get_combined_compileyaml(yamlfile,platform,target) + combined = init_compile_yaml(yamlfile, platform, target) + get_combined_compileyaml(combined) if use =="pp": - get_combined_ppyaml(yamlfile,experiment,platform,target) + combined = init_pp_yaml(yamlfile,experiment,platform,target) + get_combined_ppyaml(combined) @click.command() def consolidate_yamls(yamlfile,experiment,platform,target,use): From ec63d02335198ba9da43df1e0db3bfaf36c8603f Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 13 Sep 2024 09:53:46 -0400 Subject: [PATCH 43/61] #141 Reorganize test yamls --- fre/yamltools/tests/AM5_example/am5.yaml | 38 +--- .../compile_yamls/compile-schema.json | 193 ++++++++++++++++++ .../{ => compile_yamls}/compile.yaml | 0 .../platforms.yaml | 2 +- .../tests/AM5_example/pp_yamls/pp-schema.json | 75 +++++++ .../pp.c96_amip.yaml | 0 6 files changed, 272 insertions(+), 36 deletions(-) create mode 100644 fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json rename fre/yamltools/tests/AM5_example/{ => compile_yamls}/compile.yaml (100%) rename fre/yamltools/tests/AM5_example/{yaml_include => compile_yamls}/platforms.yaml (89%) create mode 100644 fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json rename fre/yamltools/tests/AM5_example/{yaml_include => pp_yamls}/pp.c96_amip.yaml (100%) diff --git a/fre/yamltools/tests/AM5_example/am5.yaml b/fre/yamltools/tests/AM5_example/am5.yaml index 07e22a07..c57c997c 100644 --- a/fre/yamltools/tests/AM5_example/am5.yaml +++ b/fre/yamltools/tests/AM5_example/am5.yaml @@ -40,8 +40,8 @@ fre_properties: build: # compile information - compileYaml: "compile.yaml" - platformYaml: "yaml_include/platforms.yaml" + compileYaml: "compile_yamls/compile.yaml" + platformYaml: "compile_yamls/platforms.yaml" shared: # directories shared across tools @@ -69,36 +69,4 @@ shared: experiments: - name: 
"c96L65_am5f7b12r1_amip" pp: - - "yaml_include/pp.c96_amip.yaml" - - name: "c96L65_am5f7b12r1_pdclim1850F" - pp: - - "yaml_include/pp.c96_clim.yaml" - - name: "c96L65_am5f7b12r1_pdclim2010F" - pp: - - "yaml_include/pp.c96_clim.yaml" - - name: "c96L65_am5f7b12r1_pdclim2010AERF" - pp: - - "yaml_include/pp.c96_clim.yaml" - - name: "c384L65_am5f7b12r1_amip" - pp: - - "yaml_include/pp.c384_amip.yaml" - - name: "c384L65_am5f7b12r1_pdclim2010F" - pp: - - "yaml_include/pp.c384_clim.yaml" - - name: "c384L65_am5f7b12r1_pdclim1850F" - pp: - - "yaml_include/pp.c384_clim.yaml" - - name: "c384L65_am5f7b12r1_pdclim2010AERF" - pp: - - "yaml_include/pp.c384_clim.yaml" - - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" - pp: - - "yaml_include/pp.c384_amip.yaml" - - "yaml_include/pp.om4.yaml" - - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" - pp: - - "yaml_include/pp.c96_amip.yaml" - - "yaml_include/pp.om4.yaml" - - name: "c96L65_am5f7b12r1_amip_cosp" - pp: - - "yaml_include/pp.c96_amip.yaml" + - "pp_yamls/pp.c96_amip.yaml" diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json b/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json new file mode 100644 index 00000000..fbdcdee1 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json @@ -0,0 +1,193 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://example.com/product.schema.json", + "title": "frecanopy", + "description": "A yaml base for use with fre canopy", + "type": "object", + "properties": { + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "platform": { + "description": "The platforms listed in the command", + "type": ["string","array"] + }, + "target": { + "description": "The targets listed in the command", + "type": ["string","array"] + }, + "build": { + "description": "", + "type": "object", + "properties": { + "compileYaml": { + "description": "Path to the compile yaml.", + "type": "string" + }, + "platformYaml": { + "description": "Path to the platform yaml.", + "type": "string" + } + } + }, + "compile": { + "description": "The source code descriptions", + "type": "object", + "items":{"$ref": "#/$defs/comp" } + }, + "platforms":{ + "description": "FRE platforms", + "type": "array", + "items":{"$ref": "#/$defs/plat" } + } + }, + "$defs":{ + "comp": { + "type": "object", + "required": ["experiment","src"], + "properties":{ + "experiment": { + "description": "The name of the model", + "type": "string" + }, + "container_addlibs":{ + "description": "Libraries and packages needed for linking in the container", + "type": ["array","string","null"] + }, + "baremetal_linkerflags":{ + "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", + "type": ["array","string","null"] + }, + "src":{ + "description": "The source code descriptions", + "type": "array", + "items":{"$ref": "#/$defs/source" } + } + } + }, + "source": { + "type": "object", + "required": ["component", "repo"], + "properties":{ + "component": { + "type": "string", + "description": "The name of the model component" + }, + "repo": { + "anyOf": [ + { + "type": "string", + "description": "The URL of the code repository" + }, + { + "type": ["array","string"], + "description": "The URL of the code repository" + } + ] + }, + "branch": { + "anyOf": [ + { + "type": "string", + "description": "The version of code to clone" + }, + { + "type": ["array","string"], + "description": "The version of code to 
clone" + } + ] + }, + "requires": { + "type": ["array","string"], + "description": "list of componets that this component depends on" + }, + "cppdefs": { + "type": "string", + "description": "String of CPPDEFs to include in compiling the component" + }, + "paths": { + "type": ["array","string"], + "description": "A list of the paths in the component to compile" + }, + "additionalInstructions": { + "type": "string", + "description": "Additional instuctions to run after checkout" + }, + "doF90Cpp": { + "type": "boolean", + "description": "True if the preprocessor needs to be run" + }, + "makeOverrides": { + "type": "string", + "description": "Overrides openmp target for MOM6" + }, + "otherFlags": { + "type": "string", + "description": "String of Include flags necessary to retrieve other code needed" + } + }, + "additionalProperties": false + }, + "plat": { + "type": "object", + "required": ["name", "compiler", "fc", "cc"], + "properties":{ + "name": { + "type": "string", + "description": "The name of the platform" + }, + "compiler": { + "type": "string", + "description": "The compiler used to build the model" + }, + "modulesInit": { + "type": ["array","string"], + "description": "Array of commands to run before loading modules" + }, + "modules": { + "type": ["array","string"], + "description": "List (array) of modules to load" + }, + "fc": { + "type": "string", + "description": "The Fortran compiler" + }, + "cc": { + "type": "string", + "description": "The C compiler" + }, + "mkTemplate": { + "type": "string", + "description": "Path to the mk template file" + }, + "modelRoot": { + "type": "string", + "description": "Path to the root for all model install files" + }, + "container": { + "anyOf": + [ + {"type": "string"}, + {"type": "boolean"} + ] + }, + "RUNenv": { + "type": ["array", "string"], + "description": "Commands needed at the beginning of a RUN in dockerfile" + }, + "containerBuild": { + "type": "string", + "description": "Program used to build the container" + }, + "containerRun": { + "type": "string", + "description": "Program used to run the container" + } + }, + "additionalProperties": false + } + }, + "required": ["compile","platforms"], + "additionalProperties": true +} diff --git a/fre/yamltools/tests/AM5_example/compile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile.yaml similarity index 100% rename from fre/yamltools/tests/AM5_example/compile.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile.yaml diff --git a/fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml similarity index 89% rename from fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml index af3a702c..7e1b9f49 100644 --- a/fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml @@ -14,7 +14,7 @@ platforms: fc: ftn cc: cc mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: "/ncrc/home1/Dana.Singh/fre/fre-cli/fre/make/tests/AM5_example/combine_yamls/test" #${HOME}/fremake_canopy/test + modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json b/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json new file mode 100644 index 00000000..dfb9cff5 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-06/schema#", + "title": "Schema for PP Yaml", + "type": "object", + "properties": { + "name": {"type": "string"}, + "platform": {"type": "string"}, + "target": {"type": "string"}, + "directories": { + "description": "FRE shared directories", + "type": "object", + "items":{"$ref": "#/$defs/dirs" } + }, + "postprocess": { + "description": "FRE post-processing information", + "type": "object", + "items":{"$ref": "#/$defs/pp" } + } + }, + "$defs": { + "dirs": { + "history_dir": {"type":"string"}, + "pp_dir": {"type":"string"}, + "ptmp_dir": {"type":"string"}, + "refinediag_scripts":{"type":["string","null"]}, + "preanalysis_script":{"type":["string","null"]}, + "history_refined":{"type":["string","null"]}, + "analysis_dir":{"type":["string","null"]}, + "pp_grid_spec": {"type":"string"}, + "fre_analysis_home": {"type":["string","null"]} + }, + "pp": { + "type": "object", + "properties": { + "settings": { + "type:": "object", + "properties": { + "history_segment": {"type":"string"}, + "site": {"type":"string"}, + "pp_chunk_a": {"type":"string"}, + "pp_chunk_b": {"type":"string"}, + "pp_start": {"type":"string"}, + "pp_stop": {"type":"string"}, + "pp_components": {"type":"string"} + } + }, + "switches": { + "type": "object", + "properties": { + "clean_work": {"type":"boolean"}, + "do_mdtf": {"type":"boolean"}, + "do_statics": {"type":"boolean"}, + "do_timeavgs": {"type":"boolean"}, + "do_refinediag": {"type":"boolean"}, + "do_atmos_plevel_masking": {"type":"boolean"}, + "do_preanalysis": {"type":"boolean"}, + "do_analysis": {"type":"boolean"}, + "do_analysis_only": {"type":"boolean"} + } + }, + "components": { + "type": "array", + "properties": { + "type": {"type":"string"}, + "sources": {"type":"string"}, + "sourceGrid": {"type":"string"}, + "xyInterp": {"type":"string"}, + "interpMethod": {"type":"string"}, + "inputRealm": {"type":"string"} + } + } + } + } + } +} diff --git a/fre/yamltools/tests/AM5_example/yaml_include/pp.c96_amip.yaml b/fre/yamltools/tests/AM5_example/pp_yamls/pp.c96_amip.yaml similarity index 100% rename from fre/yamltools/tests/AM5_example/yaml_include/pp.c96_amip.yaml rename to fre/yamltools/tests/AM5_example/pp_yamls/pp.c96_amip.yaml From 9976464315a01fb978d9821411c234d381c06c9a Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 13 Sep 2024 09:54:51 -0400 Subject: [PATCH 44/61] #141 Fix paths and add test cases - new test for validation of combiend yamls - new fail test for if compile yaml path not correct - new fail test for if a value in the compile.yaml is not of right data type --- .../fail_cases/am5-wrong_compile.yaml | 72 ++++++++ .../fail_cases/am5-wrong_datatype.yaml | 72 ++++++++ .../combined-am5-wrong_compile.yaml | 77 +++++++++ .../combined-am5-wrong_datatype.yaml | 58 +++++++ .../compile_fail/wrong_compile.yaml | 11 ++ .../compile_fail/wrong_platforms.yaml | 26 +++ fre/yamltools/tests/test_combine_yamls.py | 161 ++++++++++++++---- 7 files changed, 444 insertions(+), 33 deletions(-) create mode 100644 fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml create mode 100644 
fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml create mode 100644 fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml create mode 100644 fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml create mode 100644 fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml create mode 100644 fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml diff --git a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml b/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml new file mode 100644 index 00000000..07f4b40f --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml @@ -0,0 +1,72 @@ +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + - &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "P1Y" + - &PP_AMIP_CHUNK384 "P1Y" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - &PP_CPLD_CHUNK_A "P5Y" + - &PP_CPLD_CHUNK_B "P20Y" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "compile_fail/compiile.yaml" + platformYaml: "compile_fail/platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - diff --git a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml b/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml new file mode 100644 index 00000000..d983740d --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml @@ -0,0 +1,72 @@ +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + - &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "P1Y" + - &PP_AMIP_CHUNK384 "P1Y" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - 
&PP_CPLD_CHUNK_A "P5Y" + - &PP_CPLD_CHUNK_B "P20Y" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "compile_fail/wrong_compile.yaml" + platformYaml: "compile_fail/wrong_platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - diff --git a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml new file mode 100644 index 00000000..e668b756 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml @@ -0,0 +1,77 @@ +name: &name "am5-wrong_compile" +platform: &platform "ncrc5.intel23" +target: &target "prod" + +### MODEL YAML SETTINGS ### +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + - &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "P1Y" + - &PP_AMIP_CHUNK384 "P1Y" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - &PP_CPLD_CHUNK_A "P5Y" + - &PP_CPLD_CHUNK_B "P20Y" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "compile_fail/compiile.yaml" + platformYaml: "compile_fail/platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + 
do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - diff --git a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml new file mode 100644 index 00000000..fbff287d --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml @@ -0,0 +1,58 @@ +name: am5-wrong_datatype +platform: ncrc5.intel23 +target: prod +build: + compileYaml: compile_fail/wrong_compile.yaml + platformYaml: compile_fail/wrong_platforms.yaml +compile: + experiment: am5 + container_addlibs: null + baremetal_linkerflags: null + src: + - component: FMS + repo: https://github.com/NOAA-GFDL/FMS.git + cppdefs: -DHAVE_GETTID -Duse_libMPI -Duse_netCDF + branch: 2022e+1 + otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include +platforms: +- name: ncrc5.intel + compiler: intel + modulesInit: + - " module use -a /ncrc/home2/fms/local/modulefiles \n" + - "source $MODULESHOME/init/sh \n" + modules: + - intel-classic/2022.2.1 + - fre/bronx-20 + - cray-hdf5/1.12.2.3 + - cray-netcdf/4.9.0.3 + fc: ftn + cc: cc + mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk + modelRoot: ${HOME}/fremake_canopy/test +- name: ncrc5.intel23 + compiler: intel + modulesInit: + - " module use -a /ncrc/home2/fms/local/modulefiles \n" + - "source $MODULESHOME/init/sh \n" + modules: + - intel-classic/2023.1.0 + - fre/bronx-20 + - cray-hdf5/1.12.2.3 + - cray-netcdf/4.9.0.3 + fc: ftn + cc: cc + mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk + modelRoot: ${HOME}/fremake_canopy/test +- name: hpcme.2023 + compiler: intel + RUNenv: + - . /spack/share/spack/setup-env.sh + - spack load libyaml + - spack load netcdf-fortran@4.5.4 + - spack load hdf5@1.14.0 + modelRoot: /apps + fc: mpiifort + cc: mpiicc + container: true + containerBuild: podman + containerRun: apptainer diff --git a/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml b/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml new file mode 100644 index 00000000..c3015d0c --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml @@ -0,0 +1,11 @@ +compile: + experiment: "am5" + container_addlibs: + baremetal_linkerflags: + src: + - component: "FMS" + repo: "https://github.com/NOAA-GFDL/FMS.git" + cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" + branch: 2022e+1 + cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" + otherFlags: *FMSincludes diff --git a/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml b/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml new file mode 100644 index 00000000..7e1b9f49 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml @@ -0,0 +1,26 @@ +platforms: + - name: ncrc5.intel + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + modelRoot: ${HOME}/fremake_canopy/test + - name: ncrc5.intel23 + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: [!join [*INTEL, 
"/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + modelRoot: ${HOME}/fremake_canopy/test + - name: hpcme.2023 + compiler: intel + RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] + modelRoot: /apps + fc: mpiifort + cc: mpiicc + container: True + containerBuild: "podman" + containerRun: "apptainer" diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py index 0f8e2fbd..9fd1d349 100644 --- a/fre/yamltools/tests/test_combine_yamls.py +++ b/fre/yamltools/tests/test_combine_yamls.py @@ -2,7 +2,12 @@ from pathlib import Path import pytest import shutil +import json +import yaml +from jsonschema import validate from fre.yamltools import combine_yamls as cy +from multiprocessing import Process + ## SET-UP # Set example yaml paths, input directory, output directory @@ -20,10 +25,16 @@ else: Path(out_dir).mkdir(parents=True,exist_ok=True) -# Set what would be click options -EXPERIMENT = "c96L65_am5f7b12r1_amip" -PLATFORM = "gfdl.ncrc5-intel22-classic" -TARGET = "prod" +## Set what would be click options +# Compile +COMP_EXPERIMENT = "am5" +COMP_PLATFORM = "ncrc5.intel23" +COMP_TARGET = "prod" + +# Post-processing +PP_EXPERIMENT = "c96L65_am5f7b12r1_amip" +PP_PLATFORM = "gfdl.ncrc5-intel22-classic" +PP_TARGET = "prod" def test_modelyaml_exists(): """ @@ -35,26 +46,13 @@ def test_compileyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/compile.yaml").exists() + assert Path(f"{in_dir}/compile_yamls/compile.yaml").exists() def test_platformyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/yaml_include/platforms.yaml").exists() - -def test_expyaml_exists(): - """ - Make sure experiment yaml file exists - """ - assert Path(f"{in_dir}/yaml_include/pp.c96_amip.yaml").exists() - -@pytest.mark.skip(reason='analysis scripts might not be defined') -def test_analysisyaml_exists(): - """ - Make sure experiment yaml file exists - """ - assert Path(f"{in_dir}/yaml_include/analysis.yaml").exists() + assert Path(f"{in_dir}/compile_yamls/platforms.yaml").exists() def test_merged_compile_yamls(): """ @@ -66,37 +64,134 @@ def test_merged_compile_yamls(): # Model yaml path modelyaml = "am5.yaml" - - USE_COMPILE = "compile" + USE = "compile" # Merge the yamls - cy._consolidate_yamls(modelyaml, EXPERIMENT, PLATFORM, TARGET, USE_COMPILE) + cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) # Move combined yaml to output location - shutil.move(f"combined-am5.yaml", out_dir) + comp_outDir = os.path.join(out_dir,"compile_yaml") + Path(comp_outDir).mkdir(parents=True,exist_ok=True) + shutil.move(f"combined-am5.yaml", comp_outDir) # Check that the combined yaml exists - assert Path(f"{out_dir}/combined-am5.yaml").exists() + assert Path(f"{comp_outDir}/combined-{COMP_EXPERIMENT}.yaml").exists() + +def test_combined_compileyaml_validation(): + """ + Validate the combined compile yaml + """ + combined_yamlfile =f"{out_dir}/compile_yaml/combined-{COMP_EXPERIMENT}.yaml" + schema_dir = Path(f"{in_dir}/compile_yamls") + schema_file = os.path.join(schema_dir, 'compile-schema.json') + + with open(combined_yamlfile,'r') as cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + validate(instance=yml,schema=schema) + +def 
test_combined_compileyaml_combinefail(): + """ + Check to test if compile yaml is incorrect/does not exist, + the combine fails. (compile yaml path misspelled) + """ + # Go into the input directory + os.chdir(f"{in_dir}/fail_cases") + + # Model yaml path + modelyaml = f"am5-wrong_compile.yaml" + USE = "compile" + + # Merge the yamls - should fail since there is no compile yaml specified in the model yaml + try: + consolidate = cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + except: + print("EXPECTED FAILURE") + assert True + +def test_combined_compileyaml_validatefail(): + """ + Check if the schema is validating correctly + Branch should be string + """ + # Go into the input directory + os.chdir(f"{in_dir}/fail_cases") + + # Model yaml path + modelyaml = "am5-wrong_datatype.yaml" + USE = "compile" + + # Merge the yamls + cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + + wrong_combined = "combined-am5-wrong_datatype.yaml" + + schema_dir = Path(f"{in_dir}/compile_yamls") + schema_file = os.path.join(schema_dir, 'compile-schema.json') + + with open(wrong_combined,'r') as cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + validate(instance=yml,schema=schema) + +############ PP ############ +def test_expyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{in_dir}/pp_yamls/pp.c96_amip.yaml").exists() + +@pytest.mark.skip(reason='analysis scripts might not be defined yet') +def test_analysisyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{in_dir}/pp_yamls/analysis.yaml").exists() def test_merged_pp_yamls(): """ - Check for the creation of the combined-[experiment] yaml + Check for the creation of the combined-[experiment] yaml Check that the model yaml was merged into the combined yaml """ + # Go into the input directory + os.chdir(in_dir) + # Model yaml path modelyaml = Path(f"{in_dir}/am5.yaml") - - USE_PP = "pp" + USE = "pp" # Merge the yamls - cy._consolidate_yamls(modelyaml, EXPERIMENT, PLATFORM, TARGET, USE_PP) + cy._consolidate_yamls(modelyaml, PP_EXPERIMENT, PP_PLATFORM, PP_TARGET, USE) # Move combined yaml to output location - shutil.move(f"combined-{EXPERIMENT}.yaml", out_dir) + pp_outDir = os.path.join(out_dir,"pp_yaml") + Path(pp_outDir).mkdir(parents=True,exist_ok=True) + shutil.move(f"combined-{PP_EXPERIMENT}.yaml", pp_outDir) # Check that the combined yaml exists - assert Path(f"{out_dir}/combined-{EXPERIMENT}.yaml").exists() + assert Path(f"{pp_outDir}/combined-{PP_EXPERIMENT}.yaml").exists() + +def test_combined_ppyaml_validation(): + """ + Validate the combined compile yaml + """ + combined_yamlfile =f"{out_dir}/pp_yaml/combined-{PP_EXPERIMENT}.yaml" + schema_dir = Path(f"{in_dir}/pp_yamls") + schema_file = os.path.join(schema_dir, 'pp-schema.json') + + with open(combined_yamlfile,'r') as cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) -##TO-DO: -# - check for correct yaml merging -# - validation + validate(instance=yml,schema=schema) From 579f34eaddf0add457b9c0a3f4e58cd2a2a9f974 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 13 Sep 2024 09:56:51 -0400 Subject: [PATCH 45/61] #141 Fix error checking --- fre/yamltools/combine_yamls.py | 46 +++++++++++++++++++++++----------- 1 file changed, 32 insertions(+), 14 deletions(-) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 
859973b7..5d93118e 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -41,17 +41,25 @@ def get_compile_paths(mainyaml_dir,comb): # set platform yaml filepath if comb_model["build"]["platformYaml"] is not None: - py=comb_model["build"]["platformYaml"] - py_path=Path(os.path.join(mainyaml_dir,py)) + if Path(comb_model["build"]["platformYaml"]).exists(): + py=comb_model["build"]["platformYaml"] + py_path=Path(os.path.join(mainyaml_dir,py)) + else: + raise ValueError("Incorrect platform yaml path given; does not exist.") else: - py_path=None + raise ValueError("No platform yaml path given!") + #py_path=None # set compile yaml filepath if comb_model["build"]["compileYaml"] is not None: - cy=comb_model["build"]["compileYaml"] - cy_path=Path(os.path.join(mainyaml_dir,cy)) + if Path(comb_model["build"]["compileYaml"]).exists(): + cy=comb_model["build"]["compileYaml"] + cy_path=Path(os.path.join(mainyaml_dir,cy)) + else: + raise ValueError("Incorrect compile yaml path given; does not exist.") else: - cy_path=None + raise ValueError("No compile yaml path given!") + #cy_path=None return (py_path,cy_path) @@ -81,25 +89,32 @@ def experiment_check(mainyaml_dir,comb,experiment): if experiment == i.get("name"): expyaml=i.get("pp") analysisyaml=i.get("analysis") - + if expyaml is not None: ey_path=[] for e in expyaml: - ey=Path(os.path.join(mainyaml_dir,e)) - ey_path.append(ey) + if Path(e).exists(): + ey=Path(os.path.join(mainyaml_dir,e)) + ey_path.append(ey) + else: + raise ValueError("Incorrect experiment yaml path given; does not exist.") else: - ey_path=None + raise ValueError("No experiment yaml path given!") if analysisyaml is not None: ay_path=[] for a in analysisyaml: - ay=Path(os.path.join(mainyaml_dir,a)) - ay_path.append(ay) + if Path(a).exists(): + ay=Path(os.path.join(mainyaml_dir,a)) + ay_path.append(ay) + else: + raise ValueError("Incorrect analysis yaml ath given; does not exist.") else: ay_path=None return (ey_path,ay_path) +## COMPILE CLASS ## class init_compile_yaml(): def __init__(self,yamlfile,platform,target): """ @@ -187,6 +202,7 @@ def clean_yaml(self): print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined +## PP CLASS ## class init_pp_yaml(): def __init__(self,yamlfile,experiment,platform,target): """ @@ -236,7 +252,6 @@ def combine_experiment(self): #expyaml_path = os.path.join(mainyaml_dir, i) with open(self.combined,'a',encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: - #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) print(f" experiment yaml: {i}") @@ -280,6 +295,7 @@ def clean_yaml(self): print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined +## Functions to combine the yaml files def get_combined_compileyaml(comb): """ Combine the model, compile, and platform yamls @@ -301,7 +317,7 @@ def get_combined_ppyaml(comb): """ Combine the model, experiment, and analysis yamls Arguments: - - comb : comine yaml object + - comb : combined yaml object """ # Merge model into combined file comb_model = comb.combine_model() @@ -324,10 +340,12 @@ def _consolidate_yamls(yamlfile,experiment,platform,target,use): if use == "compile": combined = init_compile_yaml(yamlfile, platform, target) + # Create combined compile yaml get_combined_compileyaml(combined) if use =="pp": combined = init_pp_yaml(yamlfile,experiment,platform,target) + # Create combined pp yaml get_combined_ppyaml(combined) 
 @click.command()

From dd6cff8d8ef48d0ec090b8604f7d5bb30d5be935 Mon Sep 17 00:00:00 2001
From: Dana Singh
Date: Fri, 13 Sep 2024 10:11:46 -0400
Subject: [PATCH 46/61] #141 Fix location

---
 fre/pp/tests/test_configure_script_yaml.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py
index ce308e31..6793dcbc 100644
--- a/fre/pp/tests/test_configure_script_yaml.py
+++ b/fre/pp/tests/test_configure_script_yaml.py
@@ -27,6 +27,8 @@ def test_configure_script():
     Creates rose-suite, regrid rose-app, remap rose-app
     TO-DO: will break this up for better tests
     """
+    os.chdir(f"{CWD}/{test_dir}/AM5_example")
+
     # Set output directory
     out_dir = Path(f"{os.getenv('HOME')}/cylc-src/{experiment}__{platform}__{target}")
     Path(out_dir).mkdir(parents=True,exist_ok=True)

From 3d330f03a0e55dc0beb191a90c99942a27bd3538 Mon Sep 17 00:00:00 2001
From: Dana Singh
Date: Mon, 23 Sep 2024 10:33:51 -0400
Subject: [PATCH 47/61] #141 Combine yamls/rewrite combined yaml as default
 behavior

---
 fre/make/createCheckout.py      | 14 ++------------
 fre/pp/configure_script_yaml.py | 14 ++------------
 2 files changed, 4 insertions(+), 24 deletions(-)

diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py
index e424e693..8ed133e9 100644
--- a/fre/make/createCheckout.py
+++ b/fre/make/createCheckout.py
@@ -36,19 +36,9 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v
     plist = platform
     tlist = target
 
-    ## If combined yaml does not exists, combine model, compile, and platform yamls
-    cd = Path.cwd()
-    combined = Path(f"combined-{name}.yaml")
-    combined_path=os.path.join(cd,combined)
-
     # Combine model, compile, and platform yamls
-    # If fre yammltools combine-yamls tools was used, the combined yaml should exist
-    if Path(combined_path).exists():
-        full_combined = combined_path
-        print("\nNOTE: Yamls previously merged.")
-    else:
-        comb = cy.init_compile_yaml(yml,platform,target)
-        full_combined = cy.get_combined_compileyaml(comb)
+    comb = cy.init_compile_yaml(yml,platform,target)
+    full_combined = cy.get_combined_compileyaml(comb)
 
     ## Get the variables in the model yaml
     freVars = varsfre.frevars(full_combined)
diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py
index 280402a5..7635e3bd 100644
--- a/fre/pp/configure_script_yaml.py
+++ b/fre/pp/configure_script_yaml.py
@@ -162,19 +162,9 @@ def _yamlInfo(yamlfile,experiment,platform,target):
     # Initialize the rose configurations
     rose_suite,rose_regrid,rose_remap = rose_init(e,p,t)
 
-    ## If combined yaml does not exists, combine model, experiment, and analysis yamls
-    cd = Path.cwd()
-    combined = Path(f"combined-{e}.yaml")
-    combined_path=os.path.join(cd,combined)
-
     # Combine model, experiment, and analysis yamls
-    # If fre yammltools combine-yamls tools was used, the combined yaml should exist
-    if Path(combined_path).exists():
-        full_combined = combined_path
-        print("\nNOTE: Yamls previously merged.")
-    else:
-        comb = cy.init_pp_yaml(yml,e,p,t)
-        full_combined = cy.get_combined_ppyaml(comb)
+    comb = cy.init_pp_yaml(yml,e,p,t)
+    full_combined = cy.get_combined_ppyaml(comb)
 
     # Validate yaml
     validate_yaml(full_combined)

From 7c795545eb34ff1bfb3af479ef83355440008f3f Mon Sep 17 00:00:00 2001
From: Dana Singh
Date: Mon, 23 Sep 2024 10:35:07 -0400
Subject: [PATCH 48/61] #141 Update schema for validation (compilation yamls)
 - there was a false success with the previous schema

---
 fre/make/gfdlfremake/schema.json | 368
++++++++++++++++--------------- 1 file changed, 195 insertions(+), 173 deletions(-) diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json index 2ab69ba6..751bb9db 100644 --- a/fre/make/gfdlfremake/schema.json +++ b/fre/make/gfdlfremake/schema.json @@ -1,179 +1,201 @@ { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://example.com/product.schema.json", - "title": "frecanopy", - "description": "A yaml base for use with fre canopy", - "type": "object", - "properties": { - "name": { - "description": "The name of the experiment", - "type": "string" - }, - "platform": { - "description": "The platforms listed in the command", - "type": ["string","array"] - }, - "target": { - "description": "The targets listed in the command", - "type": ["string","array"] - }, - "compile": { - "description": "The source code descriptions", - "type": "object", - "items":{"$ref": "#/$defs/comp" } - }, - "platforms":{ - "description": "FRE platforms", - "type": "array", - "items":{"$ref": "#/$defs/plat" } + "$schema": "http://json-schema.org/draft-06/schema#", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "description": "The name of the experiment", + "type": "string" + }, + "platform": { + "description": "The platforms listed in the command", + "type": "string" + }, + "target": { + "description": "The targets listed in the command", + "type": "string" + }, + "build": { + "type": "object", + "additionalProperties": false, + "properties": { + "compileYaml": { + "description": "Path to the compile yaml.", + "type": "string" + }, + "platformYaml": { + "description": "Path to the platform yaml.", + "type": "string" + } + } + }, + "compile": { + "description": "The source code descriptions", + "$ref": "#/definitions/Compile" + }, + "platforms": { + "description": "FRE platforms", + "type": "array", + "items": {"$ref": "#/definitions/Platform"} + } + }, + "definitions": { + "Compile": { + "type": "object", + "properties": { + "experiment": { + "description": "The name of the model", + "type": "string" + }, + "container_addlibs": { + "description": "Libraries and packages needed for linking in the container", + "type": ["array","string","null"] + }, + "baremetal_linkerflags": { + "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", + "type": ["array","string","null"] + }, + "src": { + "type": "array", + "items": {"$ref": "#/definitions/Src"} } - }, - "$defs":{ - "comp": { - "type": "object", - "required": ["experiment","src"], - "properties":{ - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs":{ - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] - }, - "baremetal_linkerflags":{ - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] - }, - "src":{ - "description": "The source code descriptions", - "type": "array", - "items":{"$ref": "#/$defs/source" } - } + } + }, + "Src": { + "type": "object", + "properties": { + "component": { + "description": "The name of the model component", + "type": "string" + }, + "repo": { + "anyOf": [ + { + "description": "The URL of the code repository", + "type": "array", + "items": { + "type": "string", + "format": "uri", + "qt-uri-protocols": [ + "https" + ], + "qt-uri-extensions": [ + ".git" + ] + } + }, + { + "description": "The URL of the code 
repository", + "type": "string", + "format": "uri", + "qt-uri-protocols": [ + "https" + ], + "qt-uri-extensions": [ + ".git" + ] } + ] + }, + "cppdefs": { + "description": "String of CPPDEFs to include in compiling the component", + "type": "string" + }, + "branch": { + "anyOf": [ + { + "description": "The version of code to clone", + "type": "array", + "items": { + "type": "string" + } + }, + { + "description": "The version of code to clone", + "type": "string" + } + ] + }, + "otherFlags": { + "description": "String of Include flags necessary to retrieve other code needed", + "type": "string" + }, + "requires": { + "description": "list of componets that this component depends on", + "type": "array", + "items": {"type": "string"} + }, + "paths": { + "description": "A list of the paths in the component to compile", + "type": "array", + "items": {"type": "string"} + }, + "doF90Cpp": { + "description": "True if the preprocessor needs to be run", + "type": "boolean" + }, + "makeOverrides": { + "description": "Overrides openmp target for MOM6", + "type": "string" + } + } + }, + "Platform": { + "type": "object", + "properties": { + "name": { + "description": "The name of the platform", + "type": "string" + }, + "compiler": { + "description": "The compiler used to build the model", + "type": "string" + }, + "modulesInit": { + "description": "Array of commands to run before loading modules", + "type": "array", + "items": {"type": "string"} + }, + "modules": { + "description": "List (array) of modules to load", + "type": "array", + "items": { + "type": "string" + } + }, + "fc": { + "description": "The Fortran compiler", + "type": "string" + }, + "cc": { + "description": "The C compiler", + "type": "string" + }, + "mkTemplate": { + "description": "Path to the mk template file", + "type": "string" + }, + "modelRoot": { + "description": "Path to the root for all model install files", + "type": "string" + }, + "RUNenv": { + "description": "Commands needed at the beginning of a RUN in dockerfile", + "type": ["array","string"] + }, + "container": { + "description": "True/False if using container to compile", + "type": "boolean" + }, + "containerBuild": { + "description": "Program used to build the container", + "type": "string" }, - "source": { - "type": "object", - "required": ["component", "repo"], - "properties":{ - "component": { - "type": "string", - "description": "The name of the model component" - }, - "repo": { - "anyOf": [ - { - "type": "string", - "description": "The URL of the code repository" - }, - { - "type": ["array","string"], - "description": "The URL of the code repository" - } - ] - }, - "branch": { - "anyOf": [ - { - "type": "string", - "description": "The version of code to clone" - }, - { - "type": ["array","string"], - "description": "The version of code to clone" - } - ] - }, - "requires": { - "type": ["array","string"], - "description": "list of componets that this component depends on" - }, - "cppdefs": { - "type": "string", - "description": "String of CPPDEFs to include in compiling the component" - }, - "paths": { - "type": ["array","string"], - "description": "A list of the paths in the component to compile" - }, - "additionalInstructions": { - "type": "string", - "description": "Additional instuctions to run after checkout" - }, - "doF90Cpp": { - "type": "boolean", - "description": "True if the preprocessor needs to be run" - }, - "makeOverrides": { - "type": "string", - "description": "Overrides openmp target for MOM6" - }, - "otherFlags": { - "type": "string", - 
"description": "String of Include flags necessary to retrieve other code needed" - } - }, - "additionalProperties": false - }, - "plat": { - "type": "object", - "required": ["name", "compiler", "fc", "cc"], - "properties":{ - "name": { - "type": "string", - "description": "The name of the platform" - }, - "compiler": { - "type": "string", - "description": "The compiler used to build the model" - }, - "modulesInit": { - "type": ["array","string"], - "description": "Array of commands to run before loading modules" - }, - "modules": { - "type": ["array","string"], - "description": "List (array) of modules to load" - }, - "fc": { - "type": "string", - "description": "The Fortran compiler" - }, - "cc": { - "type": "string", - "description": "The C compiler" - }, - "mkTemplate": { - "type": "string", - "description": "Path to the mk template file" - }, - "modelRoot": { - "type": "string", - "description": "Path to the root for all model install files" - }, - "container": { - "anyOf": - [ - {"type": "string"}, - {"type": "boolean"} - ] - }, - "RUNenv": { - "type": ["array", "string"], - "description": "Commands needed at the beginning of a RUN in dockerfile" - }, - "containerBuild": { - "type": "string", - "description": "Program used to build the container" - }, - "containerRun": { - "type": "string", - "description": "Program used to run the container" - } - }, - "additionalProperties": false + "containerRun": { + "description": "Program used to run the container", + "type": "string" } - }, - "required": ["platforms"], - "additionalProperties": true + } + } + } } From f33329445ba9f58da5a5302a69becad3c5ec5802 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 23 Sep 2024 13:19:37 -0400 Subject: [PATCH 49/61] #141 Reorganize test yamls and update tests - update paths in test script based on reorganizations - update test_combine_yamls for validation test, wrong compile file test, wrong data type (invalid yaml) test --- fre/yamltools/combine_yamls.py | 8 +- .../compile_yamls/compile-schema.json | 193 ------------------ .../compile_fail/am5-wrong_compilefile.yaml} | 4 +- .../compile_fail}/am5-wrong_datatype.yaml | 4 +- .../compile_fail/wrong_compile.yaml | 4 +- .../compile_fail/wrong_platforms.yaml | 0 .../AM5_example/compile_yamls/schema.json | 1 + .../combined-am5-wrong_compile.yaml | 77 ------- .../combined-am5-wrong_datatype.yaml | 58 ------ .../tests/AM5_example/pp_yamls/pp-schema.json | 75 ------- .../tests/AM5_example/pp_yamls/schema.json | 1 + fre/yamltools/tests/test_combine_yamls.py | 72 ++++--- 12 files changed, 55 insertions(+), 442 deletions(-) delete mode 100644 fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json rename fre/yamltools/tests/AM5_example/{fail_cases/am5-wrong_compile.yaml => compile_yamls/compile_fail/am5-wrong_compilefile.yaml} (95%) rename fre/yamltools/tests/AM5_example/{fail_cases => compile_yamls/compile_fail}/am5-wrong_datatype.yaml (95%) rename fre/yamltools/tests/AM5_example/{fail_cases => compile_yamls}/compile_fail/wrong_compile.yaml (85%) rename fre/yamltools/tests/AM5_example/{fail_cases => compile_yamls}/compile_fail/wrong_platforms.yaml (100%) create mode 120000 fre/yamltools/tests/AM5_example/compile_yamls/schema.json delete mode 100644 fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml delete mode 100644 fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml delete mode 100644 fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json create mode 120000 
fre/yamltools/tests/AM5_example/pp_yamls/schema.json diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 5d93118e..fb2d24dc 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -197,7 +197,7 @@ def clean_yaml(self): del full_yaml[kc] with open(self.combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(full_yaml,f,sort_keys=False) + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined @@ -289,8 +289,8 @@ def clean_yaml(self): if kc in full_yaml.keys(): del full_yaml[kc] - with open(self.combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(full_yaml,f,sort_keys=False) + with open(self.combined,'w') as f: + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined @@ -307,7 +307,7 @@ def get_combined_compileyaml(comb): # Merge compile.yaml into combined file comb_compile = comb.combine_compile() # Merge platforms.yaml into combined file - comb_platform = comb.combine_platforms() + full_combined = comb.combine_platforms() # Clean the yaml full_combined = comb.clean_yaml() diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json b/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json deleted file mode 100644 index fbdcdee1..00000000 --- a/fre/yamltools/tests/AM5_example/compile_yamls/compile-schema.json +++ /dev/null @@ -1,193 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://example.com/product.schema.json", - "title": "frecanopy", - "description": "A yaml base for use with fre canopy", - "type": "object", - "properties": { - "name": { - "description": "The name of the experiment", - "type": "string" - }, - "platform": { - "description": "The platforms listed in the command", - "type": ["string","array"] - }, - "target": { - "description": "The targets listed in the command", - "type": ["string","array"] - }, - "build": { - "description": "", - "type": "object", - "properties": { - "compileYaml": { - "description": "Path to the compile yaml.", - "type": "string" - }, - "platformYaml": { - "description": "Path to the platform yaml.", - "type": "string" - } - } - }, - "compile": { - "description": "The source code descriptions", - "type": "object", - "items":{"$ref": "#/$defs/comp" } - }, - "platforms":{ - "description": "FRE platforms", - "type": "array", - "items":{"$ref": "#/$defs/plat" } - } - }, - "$defs":{ - "comp": { - "type": "object", - "required": ["experiment","src"], - "properties":{ - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs":{ - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] - }, - "baremetal_linkerflags":{ - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] - }, - "src":{ - "description": "The source code descriptions", - "type": "array", - "items":{"$ref": "#/$defs/source" } - } - } - }, - "source": { - "type": "object", - "required": ["component", "repo"], - "properties":{ - "component": { - "type": "string", - "description": "The name of the model component" - }, - "repo": { - "anyOf": [ - { - "type": "string", - "description": "The URL of the code repository" - }, - { - "type": 
["array","string"], - "description": "The URL of the code repository" - } - ] - }, - "branch": { - "anyOf": [ - { - "type": "string", - "description": "The version of code to clone" - }, - { - "type": ["array","string"], - "description": "The version of code to clone" - } - ] - }, - "requires": { - "type": ["array","string"], - "description": "list of componets that this component depends on" - }, - "cppdefs": { - "type": "string", - "description": "String of CPPDEFs to include in compiling the component" - }, - "paths": { - "type": ["array","string"], - "description": "A list of the paths in the component to compile" - }, - "additionalInstructions": { - "type": "string", - "description": "Additional instuctions to run after checkout" - }, - "doF90Cpp": { - "type": "boolean", - "description": "True if the preprocessor needs to be run" - }, - "makeOverrides": { - "type": "string", - "description": "Overrides openmp target for MOM6" - }, - "otherFlags": { - "type": "string", - "description": "String of Include flags necessary to retrieve other code needed" - } - }, - "additionalProperties": false - }, - "plat": { - "type": "object", - "required": ["name", "compiler", "fc", "cc"], - "properties":{ - "name": { - "type": "string", - "description": "The name of the platform" - }, - "compiler": { - "type": "string", - "description": "The compiler used to build the model" - }, - "modulesInit": { - "type": ["array","string"], - "description": "Array of commands to run before loading modules" - }, - "modules": { - "type": ["array","string"], - "description": "List (array) of modules to load" - }, - "fc": { - "type": "string", - "description": "The Fortran compiler" - }, - "cc": { - "type": "string", - "description": "The C compiler" - }, - "mkTemplate": { - "type": "string", - "description": "Path to the mk template file" - }, - "modelRoot": { - "type": "string", - "description": "Path to the root for all model install files" - }, - "container": { - "anyOf": - [ - {"type": "string"}, - {"type": "boolean"} - ] - }, - "RUNenv": { - "type": ["array", "string"], - "description": "Commands needed at the beginning of a RUN in dockerfile" - }, - "containerBuild": { - "type": "string", - "description": "Program used to build the container" - }, - "containerRun": { - "type": "string", - "description": "Program used to run the container" - } - }, - "additionalProperties": false - } - }, - "required": ["compile","platforms"], - "additionalProperties": true -} diff --git a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml similarity index 95% rename from fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml index 07f4b40f..86bbb57a 100644 --- a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_compile.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml @@ -40,8 +40,8 @@ fre_properties: build: # compile information - compileYaml: "compile_fail/compiile.yaml" - platformYaml: "compile_fail/platforms.yaml" + compileYaml: "compiile.yaml" + platformYaml: "wrong_platforms.yaml" shared: # directories shared across tools diff --git a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml similarity index 95% rename from 
fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml index d983740d..e65f3bd2 100644 --- a/fre/yamltools/tests/AM5_example/fail_cases/am5-wrong_datatype.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml @@ -40,8 +40,8 @@ fre_properties: build: # compile information - compileYaml: "compile_fail/wrong_compile.yaml" - platformYaml: "compile_fail/wrong_platforms.yaml" + compileYaml: "wrong_compile.yaml" + platformYaml: "wrong_platforms.yaml" shared: # directories shared across tools diff --git a/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml similarity index 85% rename from fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml index c3015d0c..c122764a 100644 --- a/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_compile.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml @@ -3,9 +3,9 @@ compile: container_addlibs: baremetal_linkerflags: src: - - component: "FMS" + - component: repo: "https://github.com/NOAA-GFDL/FMS.git" cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" - branch: 2022e+1 + branch: 2022.01 cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" otherFlags: *FMSincludes diff --git a/fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_platforms.yaml similarity index 100% rename from fre/yamltools/tests/AM5_example/fail_cases/compile_fail/wrong_platforms.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_platforms.yaml diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/schema.json b/fre/yamltools/tests/AM5_example/compile_yamls/schema.json new file mode 120000 index 00000000..7f618a23 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/schema.json @@ -0,0 +1 @@ +../../../../make/gfdlfremake/schema.json \ No newline at end of file diff --git a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml deleted file mode 100644 index e668b756..00000000 --- a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_compile.yaml +++ /dev/null @@ -1,77 +0,0 @@ -name: &name "am5-wrong_compile" -platform: &platform "ncrc5.intel23" -target: &target "prod" - -### MODEL YAML SETTINGS ### -# reusable variables -fre_properties: - - &AM5_VERSION "am5f7b12r1" - - &FRE_STEM !join [am5/, *AM5_VERSION] - - # amip - - &EXP_AMIP_START "1979" - - &EXP_AMIP_END "2020" - - &ANA_AMIP_START "1980" - - &ANA_AMIP_END "2020" - - - &PP_AMIP_CHUNK96 "P1Y" - - &PP_AMIP_CHUNK384 "P1Y" - - &PP_XYINTERP96 "180,288" - - &PP_XYINTERP384 "720,1152" - - # climo - - &EXP_CLIMO_START96 "0001" - - &EXP_CLIMO_END96 "0011" - - &ANA_CLIMO_START96 "0002" - - &ANA_CLIMO_END96 "0011" - - - &EXP_CLIMO_START384 "0001" - - &EXP_CLIMO_END384 "0006" - - &ANA_CLIMO_START384 "0002" - - &ANA_CLIMO_END384 "0006" - - # coupled - - &PP_CPLD_CHUNK_A "P5Y" - - &PP_CPLD_CHUNK_B "P20Y" - - # grids - - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" - - # compile information - - &release "f1a1r1" - - &INTEL "intel-classic" - - 
&FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" - - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" - -build: - # compile information - compileYaml: "compile_fail/compiile.yaml" - platformYaml: "compile_fail/platforms.yaml" - -shared: - # directories shared across tools - directories: &shared_directories - history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] - pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] - analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] - ptmp_dir: "/xtmp/$USER/ptmp" - fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" - - # shared pp settings - postprocess: - settings: &shared_settings - history_segment: "P1Y" - site: "ppan" - switches: &shared_switches - do_statics: True - do_timeavgs: True - clean_work: True - do_refinediag: False - do_atmos_plevel_masking: True - do_preanalysis: False - do_analysis: True - -experiments: - - name: "c96L65_am5f7b12r1_amip" - pp: - - diff --git a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml b/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml deleted file mode 100644 index fbff287d..00000000 --- a/fre/yamltools/tests/AM5_example/fail_cases/combined-am5-wrong_datatype.yaml +++ /dev/null @@ -1,58 +0,0 @@ -name: am5-wrong_datatype -platform: ncrc5.intel23 -target: prod -build: - compileYaml: compile_fail/wrong_compile.yaml - platformYaml: compile_fail/wrong_platforms.yaml -compile: - experiment: am5 - container_addlibs: null - baremetal_linkerflags: null - src: - - component: FMS - repo: https://github.com/NOAA-GFDL/FMS.git - cppdefs: -DHAVE_GETTID -Duse_libMPI -Duse_netCDF - branch: 2022e+1 - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -platforms: -- name: ncrc5.intel - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2022.2.1 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: ${HOME}/fremake_canopy/test -- name: ncrc5.intel23 - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2023.1.0 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: ${HOME}/fremake_canopy/test -- name: hpcme.2023 - compiler: intel - RUNenv: - - . 
/spack/share/spack/setup-env.sh - - spack load libyaml - - spack load netcdf-fortran@4.5.4 - - spack load hdf5@1.14.0 - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: true - containerBuild: podman - containerRun: apptainer diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json b/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json deleted file mode 100644 index dfb9cff5..00000000 --- a/fre/yamltools/tests/AM5_example/pp_yamls/pp-schema.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-06/schema#", - "title": "Schema for PP Yaml", - "type": "object", - "properties": { - "name": {"type": "string"}, - "platform": {"type": "string"}, - "target": {"type": "string"}, - "directories": { - "description": "FRE shared directories", - "type": "object", - "items":{"$ref": "#/$defs/dirs" } - }, - "postprocess": { - "description": "FRE post-processing information", - "type": "object", - "items":{"$ref": "#/$defs/pp" } - } - }, - "$defs": { - "dirs": { - "history_dir": {"type":"string"}, - "pp_dir": {"type":"string"}, - "ptmp_dir": {"type":"string"}, - "refinediag_scripts":{"type":["string","null"]}, - "preanalysis_script":{"type":["string","null"]}, - "history_refined":{"type":["string","null"]}, - "analysis_dir":{"type":["string","null"]}, - "pp_grid_spec": {"type":"string"}, - "fre_analysis_home": {"type":["string","null"]} - }, - "pp": { - "type": "object", - "properties": { - "settings": { - "type:": "object", - "properties": { - "history_segment": {"type":"string"}, - "site": {"type":"string"}, - "pp_chunk_a": {"type":"string"}, - "pp_chunk_b": {"type":"string"}, - "pp_start": {"type":"string"}, - "pp_stop": {"type":"string"}, - "pp_components": {"type":"string"} - } - }, - "switches": { - "type": "object", - "properties": { - "clean_work": {"type":"boolean"}, - "do_mdtf": {"type":"boolean"}, - "do_statics": {"type":"boolean"}, - "do_timeavgs": {"type":"boolean"}, - "do_refinediag": {"type":"boolean"}, - "do_atmos_plevel_masking": {"type":"boolean"}, - "do_preanalysis": {"type":"boolean"}, - "do_analysis": {"type":"boolean"}, - "do_analysis_only": {"type":"boolean"} - } - }, - "components": { - "type": "array", - "properties": { - "type": {"type":"string"}, - "sources": {"type":"string"}, - "sourceGrid": {"type":"string"}, - "xyInterp": {"type":"string"}, - "interpMethod": {"type":"string"}, - "inputRealm": {"type":"string"} - } - } - } - } - } -} diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/schema.json b/fre/yamltools/tests/AM5_example/pp_yamls/schema.json new file mode 120000 index 00000000..a76dc8fc --- /dev/null +++ b/fre/yamltools/tests/AM5_example/pp_yamls/schema.json @@ -0,0 +1 @@ +../../../../pp/schema.json \ No newline at end of file diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py index 9fd1d349..dbe24fe8 100644 --- a/fre/yamltools/tests/test_combine_yamls.py +++ b/fre/yamltools/tests/test_combine_yamls.py @@ -4,7 +4,7 @@ import shutil import json import yaml -from jsonschema import validate +from jsonschema import validate, ValidationError, SchemaError from fre.yamltools import combine_yamls as cy from multiprocessing import Process @@ -15,15 +15,17 @@ test_dir = Path("fre/yamltools/tests") in_dir = Path(f"{CWD}/{test_dir}/AM5_example") -# Create output directory -out_dir = Path(f"{CWD}/{test_dir}/combine_yamls_out") +# Create output directories +comp_out_dir = Path(f"{CWD}/{test_dir}/combine_yamls_out/compile") +pp_out_dir = 
Path(f"{CWD}/{test_dir}/combine_yamls_out/pp") # If output directory exists, remove and create again -if out_dir.exists(): - shutil.rmtree(out_dir) - Path(out_dir).mkdir(parents=True,exist_ok=True) -else: - Path(out_dir).mkdir(parents=True,exist_ok=True) +for out in [comp_out_dir, pp_out_dir]: + if out.exists(): + shutil.rmtree(out) + Path(out).mkdir(parents=True,exist_ok=True) + else: + Path(out).mkdir(parents=True,exist_ok=True) ## Set what would be click options # Compile @@ -70,20 +72,17 @@ def test_merged_compile_yamls(): cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) # Move combined yaml to output location - comp_outDir = os.path.join(out_dir,"compile_yaml") - Path(comp_outDir).mkdir(parents=True,exist_ok=True) - shutil.move(f"combined-am5.yaml", comp_outDir) + shutil.move(f"combined-am5.yaml", comp_out_dir) # Check that the combined yaml exists - assert Path(f"{comp_outDir}/combined-{COMP_EXPERIMENT}.yaml").exists() + assert Path(f"{comp_out_dir}/combined-{COMP_EXPERIMENT}.yaml").exists() def test_combined_compileyaml_validation(): """ Validate the combined compile yaml """ - combined_yamlfile =f"{out_dir}/compile_yaml/combined-{COMP_EXPERIMENT}.yaml" - schema_dir = Path(f"{in_dir}/compile_yamls") - schema_file = os.path.join(schema_dir, 'compile-schema.json') + combined_yamlfile =f"{comp_out_dir}/combined-{COMP_EXPERIMENT}.yaml" + schema_file = os.path.join(f"{in_dir}","compile_yamls","schema.json") with open(combined_yamlfile,'r') as cf: yml = yaml.safe_load(cf) @@ -92,7 +91,12 @@ def test_combined_compileyaml_validation(): s = f.read() schema = json.loads(s) - validate(instance=yml,schema=schema) + # If the yaml is valid, no issues + # If the yaml is not valid, error + try: + validate(instance=yml,schema=schema) + except: + assert False def test_combined_compileyaml_combinefail(): """ @@ -100,17 +104,21 @@ def test_combined_compileyaml_combinefail(): the combine fails. 
(compile yaml path misspelled) """ # Go into the input directory - os.chdir(f"{in_dir}/fail_cases") + os.chdir(f"{in_dir}/compile_yamls/compile_fail") # Model yaml path - modelyaml = f"am5-wrong_compile.yaml" + modelyaml = f"am5-wrong_compilefile.yaml" USE = "compile" # Merge the yamls - should fail since there is no compile yaml specified in the model yaml try: consolidate = cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_compilefile.yaml", comp_out_dir) except: print("EXPECTED FAILURE") + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_compilefile.yaml", comp_out_dir) assert True def test_combined_compileyaml_validatefail(): @@ -119,7 +127,7 @@ def test_combined_compileyaml_validatefail(): Branch should be string """ # Go into the input directory - os.chdir(f"{in_dir}/fail_cases") + os.chdir(f"{in_dir}/compile_yamls/compile_fail") # Model yaml path modelyaml = "am5-wrong_datatype.yaml" @@ -128,19 +136,27 @@ def test_combined_compileyaml_validatefail(): # Merge the yamls cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) - wrong_combined = "combined-am5-wrong_datatype.yaml" + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_datatype.yaml", comp_out_dir) - schema_dir = Path(f"{in_dir}/compile_yamls") - schema_file = os.path.join(schema_dir, 'compile-schema.json') + # Validate against schema; should fail + wrong_combined = Path(f"{comp_out_dir}/combined-am5-wrong_datatype.yaml") + schema_file = os.path.join(f"{in_dir}","compile_yamls","schema.json") + # Open/load combined yaml file with open(wrong_combined,'r') as cf: yml = yaml.safe_load(cf) + # Open/load schema.jaon with open(schema_file,'r') as f: s = f.read() schema = json.loads(s) - validate(instance=yml,schema=schema) + # Validation should fail + try: + validate(instance=yml,schema=schema) + except: + assert True ############ PP ############ def test_expyaml_exists(): @@ -172,20 +188,18 @@ def test_merged_pp_yamls(): cy._consolidate_yamls(modelyaml, PP_EXPERIMENT, PP_PLATFORM, PP_TARGET, USE) # Move combined yaml to output location - pp_outDir = os.path.join(out_dir,"pp_yaml") - Path(pp_outDir).mkdir(parents=True,exist_ok=True) - shutil.move(f"combined-{PP_EXPERIMENT}.yaml", pp_outDir) + shutil.move(f"combined-{PP_EXPERIMENT}.yaml", pp_out_dir) # Check that the combined yaml exists - assert Path(f"{pp_outDir}/combined-{PP_EXPERIMENT}.yaml").exists() + assert Path(f"{pp_out_dir}/combined-{PP_EXPERIMENT}.yaml").exists() def test_combined_ppyaml_validation(): """ Validate the combined compile yaml """ - combined_yamlfile =f"{out_dir}/pp_yaml/combined-{PP_EXPERIMENT}.yaml" + combined_yamlfile =f"{pp_out_dir}/combined-{PP_EXPERIMENT}.yaml" schema_dir = Path(f"{in_dir}/pp_yamls") - schema_file = os.path.join(schema_dir, 'pp-schema.json') + schema_file = os.path.join(schema_dir, 'schema.json') with open(combined_yamlfile,'r') as cf: yml = yaml.safe_load(cf) From 77e123cfaf2ffe7f1b4ccf0123cfbb81da85b2a6 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 25 Sep 2024 11:34:53 -0400 Subject: [PATCH 50/61] #141 Wrap code in function and add error check --- fre/make/createCheckout.py | 1 + fre/make/createCompile.py | 16 ++++------------ fre/make/createDocker.py | 15 ++++----------- fre/make/createMakefile.py | 15 +++------------ fre/make/runFremake.py | 15 ++++----------- fre/yamltools/combine_yamls.py | 26 +++++++++++++++++++++++--- 6 files changed, 
39 insertions(+), 49 deletions(-) diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index 8ed133e9..6c421c0d 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -37,6 +37,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v tlist = target # Combine model, compile, and platform yamls + # Default behavior - combine yamls / rewrite combined yaml comb = cy.init_compile_yaml(yml,platform,target) full_combined = cy.get_combined_compileyaml(comb) diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 07745bbc..74f27746 100644 --- a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -31,19 +31,12 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): plist = platform tlist = target - ## If combined yaml does not exist, combine model, compile, and platform yamls - cd = Path.cwd() + # Combined compile yaml file combined = Path(f"combined-{name}.yaml") - combined_path=os.path.join(cd,combined) - # Combine model, compile, and platform yamls - # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists(): - full_combined = combined_path - print("\nNOTE: Yamls previously merged.") - else: - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -67,7 +60,6 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - ## Make the bldDir based on the modelRoot, the platform, and the target srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index 7170ac97..044120f7 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -19,19 +19,12 @@ def dockerfile_create(yamlfile,platform,target,execute): name = yamlfile.split(".")[0] run = execute - ## If combined yaml does not exist, combine model, compile, and platform yamls - cd = Path.cwd() + # Combined compile yaml file combined = Path(f"combined-{name}.yaml") - combined_path=os.path.join(cd,combined) - # Combine model, compile, and platform yamls - # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists(): - full_combined = combined_path - print("\nNOTE: Yamls previously merged.") - else: - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index 9758c5a5..f50f84cb 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -18,19 +18,11 @@ def 
makefile_create(yamlfile,platform,target): yml = yamlfile name = yamlfile.split(".")[0] - ## If combined yaml does not exist, combine model, compile, and platform yamls - cd = Path.cwd() combined = Path(f"combined-{name}.yaml") - combined_path=os.path.join(cd,combined) - # Combine model, compile, and platform yamls - # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists(): - full_combined = combined_path - print("\nNOTE: Yamls previously merged.") - else: - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) @@ -50,7 +42,6 @@ def makefile_create(yamlfile,platform,target): raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - ## Make the bldDir based on the modelRoot, the platform, and the target srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 821336c5..fe0b2748 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -39,19 +39,12 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb plist = platform tlist = target - ## If combined yaml does not exist, combine model, compile, and platform yamls - cd = Path.cwd() + # Combined compile yaml file combined = Path(f"combined-{name}.yaml") - combined_path=os.path.join(cd,combined) - # Combine model, compile, and platform yamls - # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists(): - full_combined = combined_path - print("\nNOTE: Yamls previously merged.") - else: - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) ## Get the variables in the model yaml freVars = varsfre.frevars(full_combined) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index fb2d24dc..7d9ff585 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -295,7 +295,7 @@ def clean_yaml(self): print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") return self.combined -## Functions to combine the yaml files +## Functions to combine the yaml files ## def get_combined_compileyaml(comb): """ Combine the model, compile, and platform yamls @@ -313,6 +313,25 @@ def get_combined_compileyaml(comb): return full_combined +def combined_compile_existcheck(combined,yml,platform,target): + """ + Checks for if combined compile yaml exists already. + If not, combine model, compile, and platform yamls. 
+ """ + cd = Path.cwd() + combined_path=os.path.join(cd,combined) + + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): + full_combined = combined_path + print("\nNOTE: Yamls previously merged.") + else: + comb = init_compile_yaml(yml,platform,target) + full_combined = get_combined_compileyaml(comb) + + return full_combined + def get_combined_ppyaml(comb): """ Combine the model, experiment, and analysis yamls @@ -342,11 +361,12 @@ def _consolidate_yamls(yamlfile,experiment,platform,target,use): combined = init_compile_yaml(yamlfile, platform, target) # Create combined compile yaml get_combined_compileyaml(combined) - - if use =="pp": + elif use =="pp": combined = init_pp_yaml(yamlfile,experiment,platform,target) # Create combined pp yaml get_combined_ppyaml(combined) + else: + raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") @click.command() def consolidate_yamls(yamlfile,experiment,platform,target,use): From 5736653b0dcd68ec274d2f4754c129b4736a832a Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 25 Sep 2024 11:48:12 -0400 Subject: [PATCH 51/61] #141 Set acceptable values for click option --- fre/yamltools/freyamltools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py index 70ff149a..fc144a5e 100644 --- a/fre/yamltools/freyamltools.py +++ b/fre/yamltools/freyamltools.py @@ -37,7 +37,7 @@ def function(context, uppercase): help="Target name", required=True) @click.option("--use", - type=str, + type=click.Choice(['compile','pp']), help="Process user is combining yamls for. Can pass 'compile' or 'pp'", required=True) @click.pass_context From 2f7add39e3666ec06041999612e3c7b7365b8a3d Mon Sep 17 00:00:00 2001 From: Chris Blanton Date: Wed, 25 Sep 2024 13:19:03 -0400 Subject: [PATCH 52/61] Update conda package version to 2024.01 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d9b97785..751efcc1 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name='fre-cli', - version='0.1.6', + version='2024.01', description='Command Line Interface for FRE commands', author='MSD Workflow Team, Bennett Chang, Dana Singh, Chris Blanton', author_email='oar.gfdl.workflow@noaa.gov', From fd2d8fc7233a3267076d518330f1953270e4f30e Mon Sep 17 00:00:00 2001 From: Dana Singh <115384427+singhd789@users.noreply.github.com> Date: Fri, 27 Sep 2024 00:58:27 -0400 Subject: [PATCH 53/61] Add pathlib import --- fre/make/runFremake.py | 1 + 1 file changed, 1 insertion(+) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index fe0b2748..006f195c 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -9,6 +9,7 @@ import logging from multiprocessing.dummy import Pool import click +from pathlib import Path from .gfdlfremake import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal import fre.yamltools.combine_yamls as cy From 19795a143221d511c3fd8ce91d31badbb4175a0b Mon Sep 17 00:00:00 2001 From: Chris Blanton Date: Fri, 27 Sep 2024 12:18:29 -0400 Subject: [PATCH 54/61] Updated "fre catalog builder" test Previously, the test verified the exit code and message for (fre-cli) c2b:~/git/fre-cli%>fre catalog builder 134 No paths given, using yaml configuration (fre-cli) c2b:~/git/fre-cli%>echo $status 135 1 Recently, the message was updated to be: (fre-cli) c2b:~/git/fre-cli%>fre catalog builder 134 
Missing: input_path or output_path. Pass it in the config yaml or as command-line option (fre-cli) c2b:~/git/fre-cli%>echo $status 135 1 So let's continue to check for the exit code and not require an exact error message. --- fre/tests/test_fre_catalog_cli.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/fre/tests/test_fre_catalog_cli.py b/fre/tests/test_fre_catalog_cli.py index 831003bf..a4fcf27d 100644 --- a/fre/tests/test_fre_catalog_cli.py +++ b/fre/tests/test_fre_catalog_cli.py @@ -24,12 +24,7 @@ def test_cli_fre_catalog_opt_dne(): def test_cli_fre_catalog_builder(): ''' fre catalog builder ''' result = runner.invoke(fre.fre, args=["catalog", "builder"]) - assert all( [ - result.exit_code == 1, - 'No paths given, using yaml configuration' - in result.stdout.split('\n') - ] - ) + assert result.exit_code == 1 def test_cli_fre_catalog_builder_help(): ''' fre catalog builder --help ''' From d564ed8a2d78e4fdc4092ca07b42860f9c7172a2 Mon Sep 17 00:00:00 2001 From: Chris Blanton Date: Fri, 27 Sep 2024 12:54:32 -0400 Subject: [PATCH 55/61] Updated "fre catalog builder" test Previously, the test verified the exit code and message for (fre-cli) c2b:~/git/fre-cli%>fre catalog builder No paths given, using yaml configuration (fre-cli) c2b:~/git/fre-cli%>echo $status 1 Recently, the message was updated to be: (fre-cli) c2b:~/git/fre-cli%>fre catalog builder Missing: input_path or output_path. Pass it in the config yaml or as command-line option (fre-cli) c2b:~/git/fre-cli%>echo $status 1 So the test needs a corresponding change to test for the updated error message --- fre/tests/test_fre_catalog_cli.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/fre/tests/test_fre_catalog_cli.py b/fre/tests/test_fre_catalog_cli.py index a4fcf27d..e6464007 100644 --- a/fre/tests/test_fre_catalog_cli.py +++ b/fre/tests/test_fre_catalog_cli.py @@ -24,7 +24,12 @@ def test_cli_fre_catalog_opt_dne(): def test_cli_fre_catalog_builder(): ''' fre catalog builder ''' result = runner.invoke(fre.fre, args=["catalog", "builder"]) - assert result.exit_code == 1 + assert all( [ + result.exit_code == 1, + 'Missing: input_path or output_path. Pass it in the config yaml or as command-line option' + in result.stdout.split('\n') + ] + ) def test_cli_fre_catalog_builder_help(): ''' fre catalog builder --help ''' From f48c7abb62f347a73156cbd0773cbe50d4305798 Mon Sep 17 00:00:00 2001 From: Chris Blanton Date: Fri, 27 Sep 2024 15:50:56 -0400 Subject: [PATCH 56/61] #141 Update yaml test to chdir back to original pytest directory. 
Otherwise, the cmor test will fail when run afterwards
---
 fre/yamltools/tests/test_combine_yamls.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py
index dbe24fe8..25b8a2a5 100644
--- a/fre/yamltools/tests/test_combine_yamls.py
+++ b/fre/yamltools/tests/test_combine_yamls.py
@@ -77,6 +77,9 @@ def test_merged_compile_yamls():
     # Check that the combined yaml exists
     assert Path(f"{comp_out_dir}/combined-{COMP_EXPERIMENT}.yaml").exists()
 
+    # Go back to original directory
+    os.chdir(CWD)
+
 def test_combined_compileyaml_validation():
     """
     Validate the combined compile yaml
@@ -121,6 +124,9 @@ def test_combined_compileyaml_combinefail():
         shutil.move(f"combined-am5-wrong_compilefile.yaml", comp_out_dir)
         assert True
 
+    # Go back to original directory
+    os.chdir(CWD)
+
 def test_combined_compileyaml_validatefail():
     """
     Check if the schema is validating correctly
@@ -157,6 +163,9 @@ def test_combined_compileyaml_validatefail():
         validate(instance=yml,schema=schema)
     except:
         assert True
+
+    # Go back to original directory
+    os.chdir(CWD)
 
 ############ PP ############
 def test_expyaml_exists():
@@ -193,6 +202,9 @@ def test_merged_pp_yamls():
     # Check that the combined yaml exists
     assert Path(f"{pp_out_dir}/combined-{PP_EXPERIMENT}.yaml").exists()
 
+    # Go back to original directory
+    os.chdir(CWD)
+
 def test_combined_ppyaml_validation():
     """
     Validate the combined compile yaml

From d27a4bcc70b802cbe99bb56dbb4410752067e10f Mon Sep 17 00:00:00 2001
From: Chris Blanton
Date: Fri, 27 Sep 2024 16:34:16 -0400
Subject: [PATCH 57/61] #141 Add one more chdir back to original working
 directory

---
 fre/pp/tests/test_configure_script_yaml.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py
index 6793dcbc..1f61efd4 100644
--- a/fre/pp/tests/test_configure_script_yaml.py
+++ b/fre/pp/tests/test_configure_script_yaml.py
@@ -44,3 +44,6 @@ def test_configure_script():
             Path(f"{out_dir}/rose-suite.conf").exists(),
             Path(f"{out_dir}/app/regrid-xy/rose-app.conf").exists(),
             Path(f"{out_dir}/app/remap-pp-components/rose-app.conf").exists()])
+
+    # Go back to original directory
+    os.chdir(CWD)

From c86affb414a449f88081d522660d7ba8ba51260c Mon Sep 17 00:00:00 2001
From: Dana Singh
Date: Mon, 30 Sep 2024 10:07:17 -0400
Subject: [PATCH 58/61] #187 Add fremake README back in

---
 fre/make/README.md | 180 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 180 insertions(+)
 create mode 100644 fre/make/README.md

diff --git a/fre/make/README.md b/fre/make/README.md
new file mode 100644
index 00000000..ff612004
--- /dev/null
+++ b/fre/make/README.md
@@ -0,0 +1,180 @@
+# **Fremake Canopy**
+Through the fre-cli, `fre make` can be used to create and run a checkout script, makefile, and compile a model.
+
+* Fremake Canopy Supports:
+   - multiple targets; use `-t` flag to define each target
+   - bare-metal build
+   - container creation
+   - parallel checkouts for bare-metal build**
+
+** **Note: Users will not be able to create containers without access to podman**
+
+The fremake canopy fre-cli subcommands are described below ([Subcommands](#subcommands)), as well as a Guide on the order in which to use them ([Guide](#guide)).
+
+Additionally, as mentioned, multiple targets can be used for multiple target-platform combinations.
+
+Below is an example of this usage for both the bare-metal build and container build, using the AM5 model
+
+- [Bare-metal Example](#bare-metal-build)
+- [Bare-metal Multi-target Example](#bare-metal-build-multi-target)
+- [Container Example](#container-build)
+
+## **Usage (Users)**
+* Refer to fre-cli [README.md](https://github.com/NOAA-GFDL/fre-cli/blob/main/README.md) for foundational fre-cli usage guide and tips.
+
+## **Quickstart**
+### **Bare-metal Build:**
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create and run the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod --execute
+```
+### **Bare-metal Build Multi-target:**
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create and run the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+```
+
+### **Container Build:**
+In order for the container to build successfully, a `-npc`, or `--no-parallel-checkout`, flag is needed.
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod
+
+# Create Dockerfile
+fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod
+
+# Create and run the Dockerfile
+fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod --execute
+```
+
+### **Run all of fremake:**
+```bash
+# Bare-metal
+fre make run-fremake -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Container
+fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod -npc
+```
+
+## Subtools
+- `fre make create-checkout [options]`
+   - Purpose: Creates the checkout script and can check out source code (with execute option)
+   - Options:
+        - `-y, --yamlfile [experiment yaml] (required)`
+        - `-p, --platform [platform] (required)`
+        - `-t, --target [target] (required)`
+        - `-j, --jobs [number of jobs to run simultaneously]`
+        - `-npc, --no-parallel-checkout (for container build)`
+        - `-e, --execute`
+
+- `fre make create-makefile [options]`
+   - Purpose: Creates the makefile
+   - Options:
+        - `-y, --yamlfile [experiment yaml] (required)`
+        - `-p, --platform [platform] (required)`
+        - `-t, --target [target] (required)`
+
+- `fre make create-compile [options]`
+   - Purpose: Creates the compile script and compiles the model (with execute option)
+   - Options:
+        - `-y, --yamlfile [experiment yaml] (required)`
+        - `-p, --platform [platform] (required)`
+        - `-t, --target [target] (required)`
+        - `-j, --jobs [number of jobs to run simultaneously]`
+        - `-n, --parallel [number of concurrent model compiles]`
+        - `-e, --execute`
+
+- `fre make create-dockerfile [options]`
+   - Purpose: Creates the dockerfile and creates the container (with execute option)
+   - With the creation of the dockerfile, the Makefile, checkout script, and any other necessary scripts are copied into the container from a temporary location
+   - Options:
+        - `-y, --yamlfile [experiment yaml] (required)`
+        - `-p, --platform [platform] (required)`
+        - `-t, --target [target] (required)`
+        - `-e, --execute`
+
+- `fre make run-fremake [options]`
+   - Purpose: Create the checkout script, Makefile, compile script, and dockerfile (platform dependent) for the compilation of the model
+   - Options:
+        - `-y, --yamlfile [experiment yaml] (required)`
+        - `-p, --platform [platform] (required)`
+        - `-t, --target [target] (required)`
+        - `-npc, --no-parallel-checkout (for container build)`
+        - `-j, --jobs [number of jobs to run simultaneously]`
+        - `-n, --parallel [number of concurrent model compiles]`
+
+## Guide
+In order to use the `fre make` tools, remember to create a combined yaml first. This can be done with the `fre yamltools combine-yamls` tool. This combines the model, compile, platform, experiment, and any analysis yamls into ONE yaml file for parsing and validation.
+
+To combine:
+`fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target]`
+
+### **Bare-metal Build:**
+```bash
+# Create checkout script
+fre make create-checkout -y [model yaml file] -p [platform] -t [target]
+
+# Create and run checkout script
+fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute
+
+# Create Makefile
+fre make create-makefile -y [model yaml file] -p [platform] -t [target]
+
+# Create the compile script
+fre make create-compile -y [model yaml file] -p [platform] -t [target]
+
+# Create and run the compile script
+fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute
+
+# Run all of fremake
+fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...]
+```
+
+### **Container Build:**
+For the container build, parallel checkouts are not supported, so the `-npc` option must be used for the checkout script. In addition, the platform must be a container platform.
+
+***To reiterate, users will not be able to create containers unless they have podman access on gaea.***
+```bash
+# Create checkout script
+fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc
+
+# Create and run checkout script
+fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute
+
+# Create Makefile
+fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
+
+# Create a Dockerfile
+fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
+
+# Create and run the Dockerfile
+fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute
+```

From d4b0dfe87179a1be31665c0b1b3e883df437ff24 Mon Sep 17 00:00:00 2001
From: Dana Singh
Date: Mon, 30 Sep 2024 10:16:56 -0400
Subject: [PATCH 59/61] #187 Update readme link

---
 fre/make/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/fre/make/README.md b/fre/make/README.md
index ff612004..961e564c 100644
--- a/fre/make/README.md
+++ b/fre/make/README.md
@@ -9,7 +9,7 @@ Through the fre-cli, `fre make` can be used to create and run a checkout script,
 
 ** **Note: Users will not be able to create containers without access to podman**
 
-The fremake canopy fre-cli subcommands are described below ([Subcommands](#subcommands)), as well as a Guide on the order in which to use them ([Guide](#guide)).
+The fremake canopy fre-cli subcommands are described below ([Subtools](#subtools)), as well as a Guide on the order in which to use them ([Guide](#guide)).
 
 Additionally, as mentioned, multiple targets can be used for multiple target-platform combinations.
 

From 05ad855cc0e32466093eaf1d217e0e1098deafb4 Mon Sep 17 00:00:00 2001
From: Dana Singh <115384427+singhd789@users.noreply.github.com>
Date: Mon, 30 Sep 2024 10:41:28 -0400
Subject: [PATCH 60/61] Update yamltools README.md

---
 fre/yamltools/README.md | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/fre/yamltools/README.md b/fre/yamltools/README.md
index fd0f8f55..aeb5ffff 100644
--- a/fre/yamltools/README.md
+++ b/fre/yamltools/README.md
@@ -1,6 +1,17 @@
 ## FRE yamltools
-### Tools:
-- `combine-yaml.py`: creates a `combined-[experiment name].yaml` file in which the [model].yaml, compile.yaml, platforms.yaml, [experiment].yaml, and [analysisscript].yaml are merged
+`fre yamltools` provides subtools that help to manage and perform operations on yaml files.
+
+## Subtools
+- `fre yamltools combine-yamls [options]`
+  - Purpose:
+    - Creates combined yaml file in which the [model].yaml, compile.yaml, and platforms.yaml are merged if `--use compile` is specified
+    - Creates combined yaml file in which the [model].yaml, [experiment].yaml, and [analysis].yaml are merged if `--use pp` is specified
+  - Options:
+    - `-y, --yamlfile [experiment yaml] (required)`
+    - `-p, --platform [platform] (required)`
+    - `-t, --target [target] (required)`
+    - `-e, --experiment [experiment name]`
+    - `--use [compile|pp] (required)`
 
 ### **Tests**
 
@@ -10,4 +21,4 @@ To run `fre yamltools` test scripts, return to root directory of the fre-cli rep
 
 Or run all tests with
 
-    python -m pytest fre/yamltools/tests
+    python -m pytests fre/yamltools/tests

From b04732d241d6c5956a7c87442b04ae768310974f Mon Sep 17 00:00:00 2001
From: Dana Singh <115384427+singhd789@users.noreply.github.com>
Date: Mon, 30 Sep 2024 10:42:59 -0400
Subject: [PATCH 61/61] Update README.md

---
 fre/yamltools/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/fre/yamltools/README.md b/fre/yamltools/README.md
index aeb5ffff..f6d896f3 100644
--- a/fre/yamltools/README.md
+++ b/fre/yamltools/README.md
@@ -21,4 +21,4 @@ To run `fre yamltools` test scripts, return to root directory of the fre-cli rep
 
 Or run all tests with
 
-    python -m pytests fre/yamltools/tests
+    python -m pytest fre/yamltools/tests
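As a usage illustration of the `combine-yamls` options documented in the yamltools README above, a minimal invocation might look like the following; the model yaml, experiment, platform, and target names are taken from the AM5 example elsewhere in this series and are only illustrative:

```bash
# Combine model, compile, and platform yamls ahead of a build
fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod --use compile

# Combine model, experiment, and analysis yamls ahead of postprocessing
fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p ncrc5.intel23 -t prod --use pp
```

Either form writes a `combined-[name].yaml` in the working directory, which the `fre make` and `fre pp` tools then consume.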
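The schema update in PATCH 48 and the reorganized tests in PATCH 49 both rely on the same jsonschema validation pattern. A minimal standalone sketch of that pattern is shown below; the file paths are illustrative placeholders, and the combined yaml is assumed to have already been produced by `fre yamltools combine-yamls`:

```python
import json
import yaml
from jsonschema import validate, ValidationError

# Illustrative paths -- substitute your own combined yaml and schema
combined_yamlfile = "combined-am5.yaml"
schema_file = "fre/make/gfdlfremake/schema.json"

# Load the combined yaml written out by `fre yamltools combine-yamls`
with open(combined_yamlfile, 'r', encoding='UTF-8') as cf:
    yml = yaml.safe_load(cf)

# Load the JSON schema used to validate compile-related yamls
with open(schema_file, 'r', encoding='UTF-8') as f:
    schema = json.load(f)

# validate() raises ValidationError when the combined yaml does not conform
try:
    validate(instance=yml, schema=schema)
    print("Combined yaml is valid")
except ValidationError as err:
    print(f"Combined yaml failed validation: {err.message}")
```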