diff --git a/fre/make/README.md b/fre/make/README.md
index bb1215cf..961e564c 100644
--- a/fre/make/README.md
+++ b/fre/make/README.md
@@ -2,26 +2,89 @@
 Through the fre-cli, `fre make` can be used to create and run a checkout script, makefile, and compile a model.
 
 * Fremake Canopy Supports:
-   - multiple targets, would have to use one `-t` flag for each one
+   - multiple targets; use `-t` flag to define each target
    - bare-metal build
    - container creation
    - parallel checkouts for bare-metal build**
 
-* **Note: Users will not be able to create containers without access to podman**
+** **Note: Users will not be able to create containers without access to podman**
 
-The fremake canopy fre-cli subcommands are described below ([Subcommands](#subcommands)), as well as a Guide on the order in which to use them ([Guide](#guide)).
+The fremake canopy fre-cli subtools are described below ([Subtools](#subtools)), as well as a Guide on the order in which to use them ([Guide](#guide)).
 
 Additionally, as mentioned, multiple targets can be used for multiple target-platform combinations. Below is an example of this usage for both the bare-metal build and container build, using the AM5 model:
 
-- [Bare-metal Example](#bare-metal-build-multi-target-example)
-- [Container Example](#container-build-multi-target-example)
+- [Bare-metal Example](#bare-metal-build)
+- [Bare-metal Multi-target Example](#bare-metal-build-multi-target)
+- [Container Example](#container-build)
 
 ## **Usage (Users)**
 * Refer to fre-cli [README.md](https://github.com/NOAA-GFDL/fre-cli/blob/main/README.md) for foundational fre-cli usage guide and tips.
-* Fremake package repository located at: https://gitlab.gfdl.noaa.gov/portable_climate/fremake_canopy/-/tree/main
+
+## **Quickstart**
+### **Bare-metal Build:**
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Create and run the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod --execute
+```
+### **Bare-metal Build Multi-target:**
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+# Create and run the compile script
+fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+```
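+
+Each platform-target combination gets its own build area under the `modelRoot` defined in the platform yaml, so the multi-target commands above produce one compile script per target. The layout below is only an illustrative sketch; the exact paths are determined by `modelRoot`, the platform, and the target:
+```bash
+# Hypothetical layout after the multi-target quickstart above
+$modelRoot/am5/src/checkout.sh                      # one checkout script shared by all targets
+$modelRoot/am5/ncrc5.intel23-prod/exec/compile.sh   # one compile script per platform-target
+$modelRoot/am5/ncrc5.intel23-debug/exec/compile.sh
+```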
+
+### **Container Build:**
+In order for the container to build successfully, a `-npc`, or `--no-parallel-checkout`, flag is needed.
+```bash
+# Create checkout script
+fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc
+
+# Create and run checkout script
+fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc --execute
+
+# Create Makefile
+fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod
+
+# Create Dockerfile
+fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod
+
+# Create and run the Dockerfile
+fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod --execute
+```
 
-## Subcommands
+### **Run all of fremake:**
+```bash
+# Bare-metal
+fre make run-fremake -y am5.yaml -p ncrc5.intel23 -t prod
+
+# Container
+fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod -npc
+```
+
+## Subtools
 - `fre make create-checkout [options]`
   - Purpose: Creates the checkout script and can check out source code (with execute option)
   - Options:
@@ -69,87 +132,49 @@ Additionally, as mentioned, multiple targets can be used for multiple target-pl
     - `-n, --parallel [number of concurrent model compiles]`
 
 ## Guide
-### **Bare-metal Build:**
-```bash
-# Create checkout script
-fre make create-checkout -y [experiment yaml file] -p [platform] -t [target]
-
-# Create and run checkout script
-fre make create-checkout -y [experiment yaml file] -p [platform] -t [target] -e
+In order to use the `fre make` tools, remember to create a combined yaml first. This can be done with the `fre yamltools combine-yamls` tool. This combines the model, compile, platform, experiment, and any analysis yamls into ONE yaml file for parsing and validation.
 
-# Create Makefile
-fre make create-makefile -y [experiment yaml file] -p [platform] -t [target]
-
-# Creat the compile script
-fre make create-compile -y [experiment yaml file] -p [platform] -t [target]
-
-# Create and run the compile script
-fre make create-compile -y [experiment yaml file] -p [platform] -t [target] -e
+To combine:
+`fre yamltools combine-yamls -y [model yaml file] -e [experiment name] -p [platform] -t [target]`
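+
+For example, with a model yaml named `am5.yaml` (the experiment, platform, and target values below are illustrative), the result is a single combined file named after the model yaml:
+```bash
+fre yamltools combine-yamls -y am5.yaml -e am5_experiment -p ncrc5.intel23 -t prod
+
+# fre make then parses and validates this one file:
+ls combined-am5.yaml
+```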
-# Run all of fremake
-fre make run-fremake -y [experiment yaml] -p [platform] -t [target] [other options...]
-```
-
-### **Bare-metal Build (Multi-target example):**
+### **Bare-metal Build:**
 ```bash
 # Create checkout script
-fre make create-checkout -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug
+fre make create-checkout -y [model yaml file] -p [platform] -t [target]
 
 # Create and run checkout script
-fre make create-checkout -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug -e
+fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute
 
 # Create Makefile
-fre make create-makefile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug
+fre make create-makefile -y [model yaml file] -p [platform] -t [target]
 
 # Create the compile script
-fre make create-compile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug
+fre make create-compile -y [model yaml file] -p [platform] -t [target]
 
 # Create and run the compile script
-fre make create-compile -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug -e
+fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute
 
 # Run all of fremake
-fre make run-fremake -y am5.yaml -p ncrc5.intel -t prod-openmp -t debug [other options...]
+fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...]
 ```
 
 ### **Container Build:**
-For the container build, parallel checkouts are not supported, so the `-npc` options must be used for the checkout script. In addition the platform must be a container platform. ***To reiterate, users will not be able to create containers unless they have podman access on gaea.***
+For the container build, parallel checkouts are not supported, so the `-npc` option must be used for the checkout script. In addition, the platform must be a container platform.
+
+***To reiterate, users will not be able to create containers unless they have podman access on gaea.***
 
 ```bash
 # Create checkout script
-fre make create-checkout -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] -npc
+fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc
 
 # Create and run checkout script
-fre make create-checkout -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target] -e -npc
+fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc --execute
 
 # Create Makefile
-fre make create-makefile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target]
-
-# Create the compile script
-fre make create-compile -y [experiment yaml file] -p [CONTAINER PLATFORM]-t [target]
+fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
 
 # Create a Dockerfile
-fre make create-dockerfile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target]
+fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
 
 # Create and run the Dockerfile
-fre make create-dockerfile -y [experiment yaml file] -p [CONTAINER PLATFORM] -t [target]
+fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute
 ```
 
-### **Container Build (Multi-target example):**
-```bash
-# Create checkout script
-fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -npc
-
-# Create and run checkout script
-fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -npc -e
-
-# Create Makefile
-fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug
-
-# Creat the compile script
-fre make create-compile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug
-
-# Create and run the compile script
-fre make create-compile -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug -e
-
-# Run all of fremake
-fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod-openmp -t debug [other options...]
-npc -``` - diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py index 97c34cc6..6c421c0d 100644 --- a/fre/make/createCheckout.py +++ b/fre/make/createCheckout.py @@ -1,17 +1,19 @@ #!/usr/bin/python3 import os +import subprocess import logging import sys import click +from pathlib import Path from .gfdlfremake import varsfre, platformfre, yamlfre, checkout, targetfre +import fre.yamltools.combine_yamls as cy @click.command() def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): # Define variables yml = yamlfile - ps = platform - ts = target + name = yamlfile.split(".")[0] run = execute jobs = str(jobs) pcheck = no_parallel_checkout @@ -34,11 +36,16 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v plist = platform tlist = target + # Combine model, compile, and platform yamls + # Default behavior - combine yamls / rewrite combined yaml + comb = cy.init_compile_yaml(yml,platform,target) + full_combined = cy.get_combined_compileyaml(comb) + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) - ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + ## Open the yaml file, validate the yaml, and parse as fremakeYaml + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets @@ -53,7 +60,7 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise ValueError (platformName + " does not exist in platforms.yaml") (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Create the source directory for the platform @@ -67,22 +74,34 @@ def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,v freCheckout = checkout.checkout("checkout.sh",srcDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + click.echo("\nCheckout script created in "+ srcDir + "/checkout.sh \n") - # Run the checkout script - if run: - freCheckout.run() + # Run the checkout script + if run == True: + freCheckout.run() + else: + sys.exit() else: - sys.exit() + print("\nCheckout script PREVIOUSLY created in "+ srcDir + "/checkout.sh \n") + if run == True: + os.chmod(srcDir+"/checkout.sh", 0o744) + try: + subprocess.run(args=[srcDir+"/checkout.sh"], check=True) + except: + print("\nThere was an error with the checkout script "+srcDir+"/checkout.sh.", + "\nTry removing test folder: " + modelRoot +"\n") + raise + else: + sys.exit() + else: - ## Run the checkout script image="ecpe4s/noaa-intel-prototype:2023.09.25" bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + click.echo("\nCheckout script created at " + tmpDir + "/checkout.sh" + "\n") if __name__ == "__main__": diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py index 24f23255..74f27746 100644 --- 
a/fre/make/createCompile.py +++ b/fre/make/createCompile.py @@ -3,17 +3,17 @@ import os import sys import logging +from pathlib import Path from multiprocessing.dummy import Pool - import click from .gfdlfremake import varsfre, platformfre, yamlfre, targetfre, buildBaremetal +import fre.yamltools.combine_yamls as cy @click.command() def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): # Define variables yml = yamlfile - ps = platform - ts = target + name = yamlfile.split(".")[0] nparallel = parallel jobs = str(jobs) run = execute @@ -31,11 +31,18 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): plist = platform tlist = target + # Combined compile yaml file + combined = Path(f"combined-{name}.yaml") + + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() ## Error checking the targets @@ -50,7 +57,8 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py index fee5775f..044120f7 100644 --- a/fre/make/createDocker.py +++ b/fre/make/createDocker.py @@ -2,11 +2,13 @@ import os import sys +from pathlib import Path import click from .gfdlfremake import varsfre, targetfre, makefilefre, platformfre, yamlfre, buildDocker +import fre.yamltools.combine_yamls as cy @click.command() -def dockerfile_create(yamlfile, platform, target, execute): +def dockerfile_create(yamlfile,platform,target,execute): srcDir="src" checkoutScriptName = "checkout.sh" baremetalRun = False # This is needed if there are no bare metal runs @@ -14,12 +16,21 @@ def dockerfile_create(yamlfile, platform, target, execute): plist = platform tlist = target yml = yamlfile + name = yamlfile.split(".")[0] run = execute + # Combined compile yaml file + combined = Path(f"combined-{name}.yaml") + + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) + ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] @@ -30,7 +41,7 @@ def dockerfile_create(yamlfile, platform, target, execute): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + 
" does not exist in " + modelYaml.platformsfile) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) @@ -42,33 +53,21 @@ def dockerfile_create(yamlfile, platform, target, execute): bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName - freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate, - tmpDir = tmpDir) - - # Loop through components and send the component name and requires for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() - dockerBuild = buildDocker.container(base = image, exp = fremakeYaml["experiment"], libs = fremakeYaml["container_addlibs"], RUNenv = RUNenv, target = targetObject) dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") - dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") + dockerBuild.writeDockerfileMakefile(tmpDir+"/Makefile", tmpDir+"/linkline.sh") + for c in fremakeYaml['src']: dockerBuild.writeDockerfileMkmf(c) dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - currDir = os.getcwd() - click.echo("\ntmpDir created at " + currDir + "/tmp") - click.echo("Dockerfile created at " + currDir + "\n") + click.echo("\ntmpDir created in " + currDir + "/tmp") + click.echo("Dockerfile created in " + currDir +"\n") if run: dockerBuild.build(containerBuild, containerRun) diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py index e520f7c7..f50f84cb 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/createMakefile.py @@ -1,8 +1,11 @@ #!/usr/bin/python3 import os +import sys +from pathlib import Path import click from .gfdlfremake import makefilefre, varsfre, targetfre, yamlfre +import fre.yamltools.combine_yamls as cy @click.command() def makefile_create(yamlfile,platform,target): @@ -13,12 +16,19 @@ def makefile_create(yamlfile,platform,target): plist = platform tlist = target yml = yamlfile + name = yamlfile.split(".")[0] + combined = Path(f"combined-{name}.yaml") + + ## If combined yaml exists, note message of its existence + ## If combined yaml does not exist, combine model, compile, and platform yamls + full_combined = cy.combined_compile_existcheck(combined,yml,platform,target) ## Get the variables in the model yaml - freVars = varsfre.frevars(yml) + freVars = varsfre.frevars(full_combined) + ## Open the yaml file and parse as fremakeYaml - modelYaml = yamlfre.freyaml(yml,freVars) + modelYaml = yamlfre.freyaml(full_combined,freVars) fremakeYaml = modelYaml.getCompileYaml() fremakeBuildList = [] @@ -29,7 +39,8 @@ def makefile_create(yamlfile,platform,target): if modelYaml.platforms.hasPlatform(platformName): pass else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target srcDir = 
modelRoot + "/" + fremakeYaml["experiment"] + "/src" diff --git a/fre/make/fremake.py b/fre/make/fremake.py index 27cec2db..54946349 100644 --- a/fre/make/fremake.py +++ b/fre/make/fremake.py @@ -7,6 +7,7 @@ yamlfile_opt_help = """Experiment yaml compile FILE """ +experiment_opt_help = """Name of experiment""" platform_opt_help = """Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions """ @@ -102,8 +103,7 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel "--no-parallel-checkout", is_flag = True, help = no_parallel_checkout_opt_help) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, default = False, help = "Use this to run the created checkout script.") @@ -167,8 +167,7 @@ def create_makefile(context,yamlfile,platform,target): type = int, metavar = '', default = 1, help = parallel_opt_help) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, default = False, help = "Use this to run the created checkout script.") @@ -181,8 +180,6 @@ def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbos """ - Write the compile script """ context.forward(compile_create) - - @make_cli.command @click.option("-y", "--yamlfile", @@ -199,8 +196,7 @@ def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbos type = str, help = target_opt_help, required = True) -@click.option("-e", - "--execute", +@click.option("--execute", is_flag = True, help = "Build Dockerfile that has been generated by create-docker.") @click.pass_context diff --git a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py index d3be45f0..f0279a11 100644 --- a/fre/make/gfdlfremake/buildBaremetal.py +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -2,96 +2,126 @@ ## \date 2023 ## \author Tom Robinson ## \email thomas.robinson@noaa.gov -## \description +## \description import subprocess import os -from . import targetfre -## \brief Called for parallel execution purposes. Runs the builds. -## \param fremakeBuildList the fremakeBuild object list passes by pool.map + def fremake_parallel(fremakeBuildList): - fremakeBuildList.run() + """ + Brief: Called for parallel execution purposes. Runs the builds. 
+ Param: + - fremakeBuildList : fremakeBuild object list passes by pool.map + """ + fremakeBuildList.run() class buildBaremetal(): -## \brief Creates the build script to compile the model -## \param self The buildScript object -## \param exp The experiment name -## \param mkTemplatePath The template used by mkmf to compile the model -## \param srcDir The source directory -## \param bldDir The build directory -## \param modules The list of modules to load before compilation -## \param modulesInit A list of commands with new line characters to initialize modules - def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jobs): - self.e = exp - self.t = target.gettargetName() - self.src = srcDir - self.bld = bldDir - self.make = "make --jobs="+str(jobs)+" "+target.getmakeline_add() #make line - self.mkmf = True - self.template = mkTemplatePath - self.modules = "" - for m in modules: - self.modules = self.modules +" "+ m -## Set up the top portion of the compile script - self.setup=[ "#!/bin/sh -fx \n", - "bld_dir="+self.bld+"/ \n", - "src_dir="+self.src+"/ \n", - "mkmf_template="+self.template+" \n"] - if self.modules != "": - self.setup.extend(modulesInit) #extend because this is a list - self.setup.append("module load "+self.modules+" \n") # Append because this is a single string -## Create the build directory - os.system("mkdir -p "+self.bld) -## Create the compile script - self.f=open(self.bld+"/compile.sh","w") - self.f.writelines(self.setup) -## \brief Adds components to the build script -## \param self The build script object -## \param c Component from the compile yaml - def writeBuildComponents(self, c): -# Shorthand for component - comp = c["component"] -# Make the component directory - self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") -# Get the paths needed for compiling - pstring = "" - for paths in c["paths"]: - pstring = pstring+"$src_dir/"+paths+" " -# Run list_paths - self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") - self.f.write(" cd $bld_dir/"+comp+"\n") -# Create the mkmf line - if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: #Has requirements -#Set up the requirements as a string to inclue after the -o - reqstring = "" - for r in c["requires"]: - reqstring = reqstring+"-I$bld_dir/"+r+" " -#Figure out if we need the preprocessor - if c["doF90Cpp"]: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") -## Finishes and writes the build script -## \param self The buildScript object + """ + Brief: Creates the build script to compile the model + Param: + - self : The buildScript object + - exp : The experiment name 
+        - mkTemplatePath : The template used by mkmf to compile the model
+        - srcDir : The source directory
+        - bldDir : The build directory
+        - modules : The list of modules to load before compilation
+        - modulesInit : A list of commands with new line characters to initialize modules
+    """
+    def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jobs):
+        """
+        Initialize variables and set up the compile script.
+        """
+        self.e = exp
+        self.t = target.gettargetName()
+        self.src = srcDir
+        self.bld = bldDir
+        self.make = "make --jobs="+str(jobs)+" "+target.getmakeline_add() #make line
+        self.mkmf = True
+        self.template = mkTemplatePath
+        self.modules = ""
+        for m in modules:
+            self.modules = self.modules +" "+ m
+
+        ## Set up the top portion of the compile script
+        self.setup=[ "#!/bin/sh -fx \n",
+                     "bld_dir="+self.bld+"/ \n",
+                     "src_dir="+self.src+"/ \n",
+                     "mkmf_template="+self.template+" \n"]
+        if self.modules != "":
+            self.setup.extend(modulesInit) # extend - this is a list
+            self.setup.append("module load "+self.modules+" \n") # append - this is a single string
+
+        ## Create the build directory
+        os.system("mkdir -p "+self.bld)
+
+        ## Create the compile script
+        self.f=open(self.bld+"/compile.sh","w")
+        self.f.writelines(self.setup)
+
+    def writeBuildComponents(self, c):
+        """
+        Brief: Adds components to the build script
+        Param:
+            - self : The build script object
+            - c : Component from the compile yaml
+        """
+        # Shorthand for component
+        comp = c["component"]
+
+        # Make the component directory
+        self.f.write("\n mkdir -p $bld_dir/"+comp+"\n")
+
+        # Get the paths needed for compiling
+        pstring = ""
+        for paths in c["paths"]:
+            pstring = pstring+"$src_dir/"+paths+" "
+
+        # Run list_paths
+        self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n")
+        self.f.write(" cd $bld_dir/"+comp+"\n")
+
+        # Create the mkmf line
+        # If this lib doesn't have any code dependencies and
+        # it requires the preprocessor (no -o and yes --use-cpp)
+        if c["requires"] == [] and c["doF90Cpp"]:
+            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+        elif c["requires"] == []: # If this lib doesn't have any code dependencies (no -o)
+            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+        else: # Has requirements
+            # Set up the requirements as a string to include after the -o
+            reqstring = ""
+            for r in c["requires"]:
+                reqstring = reqstring+"-I$bld_dir/"+r+" "
+
+            # Figure out if we need the preprocessor
+            if c["doF90Cpp"]:
+                self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+            else:
+                self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+
     ##TODO: add targets input
-  def writeScript(self):
-    self.f.write("cd "+self.bld+"\n")
-    self.f.write(self.make+"\n")
-    self.f.close()
-## Run the build script
-## \param self The dockerfile object
+    def writeScript(self):
+        """
+        Brief: Finishes and writes the build script
+        Param:
+            - self : The buildScript object
+        """
+        self.f.write("cd "+self.bld+"\n")
+        self.f.write(self.make+"\n")
+        self.
self.f.close() + ## TODO run as a batch job on the login cluster - def run(self): + def run(self): + """ + Brief: Run the build script + Param: + - self : The dockerfile object + """ ###### TODO make the Makefile - os.chmod(self.bld+"/compile.sh", 0o744) - command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] - try: - subprocess.run(args=command, check=True) - except: - print("There was an error running "+self.bld+"/compile.sh") - raise - + os.chmod(self.bld+"/compile.sh", 0o744) + command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] + try: + subprocess.run(args=command, check=True) + except: + print("There was an error running "+self.bld+"/compile.sh") + raise diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 92f418b0..ee93ecd1 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -2,152 +2,199 @@ ## \date 2023 ## \author Tom Robinson ## \email thomas.robinson@noaa.gov -## \description +## \description import os -from . import targetfre class container(): -## \brief Opens the Dockerfile for writing -## \param self The dockerfile object -## \param base The docker base image to start from -## \param libs Additional libraries defined by user -## \param exp The experiment name -## \param RUNenv The commands that have to be run at the beginning of a RUN in the dockerfile -## to set up the environment - def __init__(self,base,exp,libs,RUNenv,target): - self.base = base - self.e = exp - self.l = libs - self.src = "/apps/"+self.e+"/src" - self.bld = "/apps/"+self.e+"/exec" - self.mkmf = True - self.target = target - self.template = "/apps/mkmf/templates/hpcme-intel21.mk" - if RUNenv == "": - self.setup = ["RUN \\ \n"] - else: - self.setup = ["RUN "+RUNenv[0]+" \\ \n"] - self.setup - for env in RUNenv[1:]: - self.setup.append(" && "+env+" \\ \n") - if self.l: - for l in self.l: - self.setup.append(" && spack load "+l+" \\ \n") - self.mkmfclone=["RUN cd /apps \\ \n", - " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", - " && cp mkmf/bin/* /usr/local/bin \n"] - self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", - " && src_dir="+self.src+" \\ \n", - " && mkmf_template="+self.template+ " \\ \n"] - self.d=open("Dockerfile","w") - self.d.writelines("FROM "+self.base+" \n") -## \brief writes to the checkout part of the Dockerfile and sets up the compile -## \param self The dockerfile object -## \param cScriptName The name of the checkout script in the container -## \param cOnDisk The relative path to the checkout script on disk - def writeDockerfileCheckout(self, cScriptName, cOnDisk): - self.checkoutPath = "/apps/"+self.e+"/src/"+ cScriptName - self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") - self.d.write("RUN chmod 744 /apps/"+self.e+"/src/checkout.sh \n") - self.d.writelines(self.setup) - self.d.write(" && /apps/"+self.e+"/src/checkout.sh \n") -# Clone mkmf - self.d.writelines(self.mkmfclone) -## Copies the Makefile into the bldDir in the dockerfile -## \param self The dockerfile object -## \param makefileOnDiskPath The path to Makefile on the local disk -## \param linklineonDiskPath The path to the link line script on the local disk - def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): - # Set up the bldDir - # If no additional libraries defined - if self.l == None: - self.bldCreate=["RUN mkdir -p "+self.bld+" \n", - "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n"] - self.d.writelines(self.bldCreate) - # If additional 
libraries defined - if self.l != None: - self.bldCreate=["RUN mkdir -p "+self.bld+" \n", - "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n", - "RUN chmod +rw "+self.bld+"/Makefile \n", - "COPY "+ linklineonDiskPath +" "+self.bld+"/linkline.sh \n", - "RUN chmod 744 "+self.bld+"/linkline.sh \n"] - self.d.writelines(self.bldCreate) - self.d.writelines(self.setup) - self.d.write(" && "+self.bld+"/linkline.sh \n") - -## \brief Adds components to the build part of the Dockerfile -## \param self The dockerfile object -## \param c Component from the compile yaml - def writeDockerfileMkmf(self, c): -# Set up the compile variables - self.d.writelines(self.bldsetup) -# Shorthand for component - comp = c["component"] -# Make the component directory - self.d.write(" && mkdir -p $bld_dir/"+comp+" \\ \n") -# Get the paths needed for compiling - pstring = "" - for paths in c["paths"]: - pstring = pstring+"$src_dir/"+paths+" " -# Run list_paths - self.d.write(" && list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+" \\ \n") - self.d.write(" && cd $bld_dir/"+comp+" \\ \n") -# Create the mkmf line - if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: #Has requirements -#Set up the requirements as a string to inclue after the -o - reqstring = "" - for r in c["requires"]: - reqstring = reqstring+"-I$bld_dir/"+r+" " -#Figure out if we need the preprocessor - if c["doF90Cpp"]: - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - else: - self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") - -## \brief Writes a runscript to set up spack loads/environment in order to run the executable in the container; runscript copied into container -## \param self The dockerfile object -## \param RUNEnv The commands that have to be run at the beginning of a RUN in the dockerfile -## \param containerRun The container platform used with `exec` to run the container; apptainer or singularity used -## \param runOnDisk The path to the run script on the local disk - def writeRunscript(self,RUNenv,containerRun,runOnDisk): - #create runscript in tmp - create spack environment, install necessary packages, - self.createscript = ["#!/bin/bash \n", - "# Set up spack loads\n", - RUNenv[0]+"\n"] - with open(runOnDisk,"w") as f: - f.writelines(self.createscript) - f.write("# Load spack packages\n") - for env in RUNenv[1:]: - f.write(env+"\n") - - if self.l: - for l in self.l: + """ + Brief: Opens the Dockerfile for writing + Param: + - self : The dockerfile object + - base : The docker base image to start from + - libs : Additional libraries defined by user + - exp : The experiment name + - RUNenv : The commands that have to be run at + the beginning of a RUN in the 
dockerfile + to set up the environment + """ + def __init__(self,base,exp,libs,RUNenv,target): + """ + Initialize variables and write to the dockerfile + """ + self.base = base + self.e = exp + self.l = libs + self.src = "/apps/"+self.e+"/src" + self.bld = "/apps/"+self.e+"/exec" + self.mkmf = True + self.target = target + self.template = "/apps/mkmf/templates/hpcme-intel21.mk" + + # Set up spack loads in RUN commands in dockerfile + if RUNenv == "": + self.setup = ["RUN \\ \n"] + else: + self.setup = ["RUN "+RUNenv[0]+" \\ \n"] + self.setup + for env in RUNenv[1:]: + self.setup.append(" && "+env+" \\ \n") + if self.l: + for l in self.l: + self.setup.append(" && spack load "+l+" \\ \n") + + # Clone and copy mkmf through Dockerfile + self.mkmfclone=["RUN cd /apps \\ \n", + " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", + " && cp mkmf/bin/* /usr/local/bin \n"] + + # Set bld_dir, src_dir, mkmf_template + self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", + " && src_dir="+self.src+" \\ \n", + " && mkmf_template="+self.template+ " \\ \n"] + self.d=open("Dockerfile","w") + self.d.writelines("FROM "+self.base+" \n") + + def writeDockerfileCheckout(self, cScriptName, cOnDisk): + """ + Brief: writes to the checkout part of the Dockerfile and sets up the compile + Param: + - self : The dockerfile object + - cScriptName : The name of the checkout script in the container + - cOnDisk : The relative path to the checkout script on disk + """ + self.checkoutPath = self.src+"/"+ cScriptName + self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") + self.d.write("RUN chmod 744 "+self.src+"/checkout.sh \n") + self.d.writelines(self.setup) + self.d.write(" && "+self.src+"/checkout.sh \n") + # Clone mkmf + self.d.writelines(self.mkmfclone) + + def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): + """ + Brief: Copies the Makefile into the bldDir in the dockerfile + Param: + - self : The dockerfile object + - makefileOnDiskPath : The path to Makefile on the local disk + - linklineonDiskPath : The path to the link line script on the local disk + """ + # Set up the bldDir + # If no additional libraries defined + if self.l == None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n"] + self.d.writelines(self.bldCreate) + # If additional libraries defined + if self.l != None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n", + "RUN chmod +rw "+self.bld+"/Makefile \n", + "COPY "+ linklineonDiskPath +" "+self.bld+"/linkline.sh \n", + "RUN chmod 744 "+self.bld+"/linkline.sh \n"] + self.d.writelines(self.bldCreate) + self.d.writelines(self.setup) + self.d.write(" && "+self.bld+"/linkline.sh \n") + + def writeDockerfileMkmf(self, c): + """ + Brief: Adds components to the build part of the Dockerfile + Param: + - self : The dockerfile object + - c : Component from the compile yaml + """ + # Set up the compile variables + self.d.writelines(self.bldsetup) + + # Shorthand for component + comp = c["component"] + + # Make the component directory + self.d.write(" && mkdir -p $bld_dir/"+comp+" \\ \n") + + # Get the paths needed for compiling + pstring = "" + for paths in c["paths"]: + pstring = pstring+"$src_dir/"+paths+" " + + # Run list_paths + self.d.write(" && list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+" \\ \n") + self.d.write(" && cd $bld_dir/"+comp+" \\ \n") + + # Create the mkmf line + if c["requires"] == [] and c["doF90Cpp"]: # If this 
lib doesn't have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp)
+            self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+        elif c["requires"] == []: # If this lib doesn't have any code dependencies (no -o)
+            self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+        else: # Has requirements
+            # Set up the requirements as a string to include after the -o
+            reqstring = ""
+            for r in c["requires"]:
+                reqstring = reqstring+"-I$bld_dir/"+r+" "
+
+            # Figure out if we need the preprocessor
+            if c["doF90Cpp"]:
+                self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+            else:
+                self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+
+    def writeRunscript(self,RUNenv,containerRun,runOnDisk):
+        """
+        Brief: Writes a runscript to set up spack loads/environment
+               in order to run the executable in the container;
+               runscript copied into container
+        Param:
+            - self : The dockerfile object
+            - RUNenv : The commands that have to be run at
+                       the beginning of a RUN in the dockerfile
+            - containerRun : The container platform used with `exec`
+                             to run the container; apptainer
+                             or singularity used
+            - runOnDisk : The path to the run script on the local disk
+        """
+        # Create the runscript in tmp - create spack environment, install necessary packages
+        self.createscript = ["#!/bin/bash \n",
+                             "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY_PATH\n",
+                             "# Set up spack loads\n",
+                             RUNenv[0]+"\n"]
+        with open(runOnDisk,"w") as f:
+            f.writelines(self.createscript)
+            f.write("# Load spack packages\n")
+            for env in RUNenv[1:]:
+                f.write(env+"\n")
+
+            if self.l:
+                for l in self.l:
                     self.spackloads = "spack load "+l+"\n"
                     f.write(self.spackloads)
+
+            f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n")
+
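+            # For illustration, the runscript written above ends up looking roughly
+            # like the following (the spack setup line and package name depend on
+            # RUNenv and the user-defined libs, and are placeholders here):
+            #   #!/bin/bash
+            #   export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY_PATH
+            #   # Set up spack loads
+            #   . /opt/spack/share/spack/setup-env.sh   # illustrative RUNenv[0]
+            #   # Load spack packages
+            #   spack load libyaml                      # illustrative container_addlibs entry
+            #   export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH
+            #   # Run executable
+            #   /apps/<experiment>/exec/<experiment>.x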
f.write("# Run executable\n") + f.write(self.bld+"/"+self.e+".x\n") + #copy runscript into container in dockerfile + self.d.write("COPY "+runOnDisk+" "+self.bld+"/execrunscript.sh\n") + #make runscript executable + self.d.write("RUN chmod 744 "+self.bld+"/execrunscript.sh\n") + #link runscript to more general location (for frerun container usage) + self.d.write("RUN mkdir -p /apps/bin \ \n") + self.d.write(" && ln -sf "+self.bld+"/execrunscript.sh "+"/apps/bin/execrunscript.sh") + #finish the dockerfile + self.d.writelines(self.setup) + self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") + self.d.write('ENTRYPOINT ["/bin/bash"]') + self.d.close() + + def build(self,containerBuild,containerRun): + """ + Brief: Builds the container image for the model + Param: + - self : The dockerfile object + - containerBuild : The tool used to build the container; + docker or podman used + - containerRun : The container platform used with `exec` to + run the container; apptainer or singularity used + """ + os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) + os.system("rm -f "+self.e+".tar "+self.e+".sif") + os.system(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) + os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") diff --git a/fre/make/gfdlfremake/checkout.py b/fre/make/gfdlfremake/checkout.py index 07e13730..3a1ffa9b 100644 --- a/fre/make/gfdlfremake/checkout.py +++ b/fre/make/gfdlfremake/checkout.py @@ -2,119 +2,154 @@ import subprocess ## TODO: Add parallelizations using () and simplify -## Creates the clone lines for the checkout script -## \param file Checkout script file -## \param repo the repo(s) to clone -## \param component Model component name -## \param srcDir The source directory -## \param branch The version to clone/checkout -## \param add Additional instrcutions after the clone -## \param multi True if a component has more than one repo to clone def writeRepo(file,repo,component,srcDir,branch,add,multi,jobs,pc): -## Write message about cloning repo and branch in component - file.write("echo cloning "+repo+" -b "+branch+" into "+srcDir+"/"+component+"\n") -## If this component has multiple repos, clone everything in the component folder -## If it's not multi, then use the component name (comp) as the folder name to clone into - if multi: - file.write("mkdir -p "+component+"\n") - file.write("cd "+component+"\n") - comp="" - else: - comp=component + """ + Brief: Creates the clone lines for the checkout script + Param: + - file Checkout script file + - repo the repo(s) to clone + - component Model component name + - srcDir The source directory + - branch The version to clone/checkout + - add Additional instrcutions after the clone + - multi True if a component has more than one repo to clone + """ + ## Write message about cloning repo and branch in component + file.write("echo cloning "+repo+" -b "+branch+" into "+srcDir+"/"+component+"\n") -## Check if there is a branch/version and then write the clone line; record the pid of that clone in dictionary `pids` if parallel checkout option is defined - if pc: - if branch=="": - file.write("(git clone --recursive --jobs="+jobs+" "+repo+" "+comp+")"+pc+"\n") - if multi: - r=repo.split("/")[4].strip(".git") - file.write("pids+=("+r+"pid:$!)\n") - else: - file.write("pids+=("+comp+"pid:$!)\n") 
- else: - file.write("(git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+")"+pc+"\n") - if multi: - r=repo.split("/")[4].strip(".git") - file.write("pids+=("+r+"pid:$!)\n") - else: - file.write("pids+=("+comp+"pid:$!)\n") - else: - if branch=="": - file.write("git clone --recursive --jobs="+jobs+" "+repo+" "+comp+"\n") - else: - file.write("git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+"\n") + ## If this component has multiple repos, clone everything in the component folder + ## If it's not multi, then use the component name (comp) as the folder name to clone into + if multi: + file.write("mkdir -p "+component+"\n") + file.write("cd "+component+"\n") + comp="" + else: + comp=component + + ## Check if there is a branch/version and then write the clone line; + ## record the pid of that clone in dictionary `pids` if parallel + ## checkout option is defined + if pc: + if branch=="": + file.write("(git clone --recursive --jobs="+jobs+" "+repo+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + file.write("(git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + if branch=="": + file.write("git clone --recursive --jobs="+jobs+" "+repo+" "+comp+"\n") + else: + file.write("git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+"\n") + + ## Make sure to go back up in the folder structure + if multi: + file.write("cd .. \n") + if add!="": + file.write(add) -## Make sure to go back up in the folder structure - if multi: - file.write("cd .. 
\n") - if add!="": - file.write(add) - -## Class to create the checkout script class checkout(): -## \brief Opens the checkout script with the specified name -## \param self The checkout script object -## \param fname The file name of the checkout script -## \param srcDir The source directory where fname will be run and source will exist - def __init__(self,fname,srcDir): - self.fname = fname - self.src = srcDir - os.system("mkdir -p "+self.src) -##TODO: Force checkout - os.system("rm -rf "+self.src+"/*") - self.checkoutScript = open(self.src+"/"+fname, 'w') - self.checkoutScript.write("#!/bin/sh -f \n") - self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") -## \brief Writes the contents of the checkout script by looping through the input yaml -## \param self The checkout script object -## \param y The fremake compile yaml - def writeCheckout(self,y,jobs,pc): - self.checkoutScript.write("cd "+self.src +"\n") - for c in y['src']: - if type(c['repo']) is list and type(c['branch']) is list: - for (repo,branch) in zip(c['repo'],c['branch']): - writeRepo(self.checkoutScript,repo,c['component'],self.src,branch,c['additionalInstructions'],True,jobs,pc) - else: - writeRepo(self.checkoutScript,c['repo'],c['component'],self.src,c['branch'],c['additionalInstructions'],False,jobs,pc) -## \brief If pc is defined: Loops through dictionary of pids, waits for each pid individually, writes exit code in `check` list; allows checkoutscript to exit if exit code is not 0; closes the checkout script when writing is done -## \param self The checkout script object - def finish (self,pc): - if pc: - self.checkoutScript.write('for id in ${pids[@]}; do\n wait ${id##*:}\n check+=("clone of ${id%%:*} exited with status $?")\ndone\n') - self.checkoutScript.write('for stat in "${check[@]}"; do\n echo $stat \n if [ ${stat##* } -ne 0 ]; then\n exit ${stat##* }\n fi\ndone') - self.checkoutScript.close() - else: - self.checkoutScript.close() -## \brief Changes the permission on the checkout script and runs it -## \param self The checkout script object + """ + Brief: Class to create the checkout script + """ + def __init__(self,fname,srcDir): + """ + Brief: Opens the checkout script with the specified name + Param: + - self The checkout script object + - fname The file name of the checkout script + - srcDir The source directory where fname will be run and source will exist + """ + self.fname = fname + self.src = srcDir + os.system("mkdir -p "+self.src) + ##TODO: Force checkout + os.system("rm -rf "+self.src+"/*") + self.checkoutScript = open(self.src+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -f \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + def writeCheckout(self,y,jobs,pc): + """ + Brief: Writes the contents of the checkout script by looping through the input yaml + Param: + - self The checkout script object + - y The fremake compile yaml + """ + self.checkoutScript.write("cd "+self.src +"\n") + for c in y['src']: + if type(c['repo']) is list and type(c['branch']) is list: + for (repo,branch) in zip(c['repo'],c['branch']): + writeRepo(self.checkoutScript,repo,c['component'],self.src,branch,c['additionalInstructions'],True,jobs,pc) + else: + writeRepo(self.checkoutScript,c['repo'],c['component'],self.src,c['branch'],c['additionalInstructions'],False,jobs,pc) + + def finish (self,pc): + """ + Brief: If pc is defined: Loops through dictionary of pids, + waits for each pid individually, writes exit code in + `check` list; + allows checkoutscript to exit if exit code is not 0; + 
closes the checkout script when writing is done + Param: + - self The checkout script object + """ + if pc: + self.checkoutScript.write('for id in ${pids[@]}; do\n wait ${id##*:}\n check+=("clone of ${id%%:*} exited with status $?")\ndone\n') + self.checkoutScript.write('for stat in "${check[@]}"; do\n echo $stat \n if [ ${stat##* } -ne 0 ]; then\n exit ${stat##* }\n fi\ndone') + self.checkoutScript.close() + else: + self.checkoutScript.close() + ## TODO: batch script building - def run (self): - os.chmod(self.src+"/"+self.fname, 0o744) - try: - subprocess.run(args=[self.src+"/"+self.fname], check=True) - except: - print("There was an error with the checkout script "+self.src+"/"+self.fname) - raise + def run (self): + """ + Brief: Changes the permission on the checkout script and runs it + Param: + - self The checkout script object + """ + os.chmod(self.src+"/"+self.fname, 0o744) + try: + subprocess.run(args=[self.src+"/"+self.fname], check=True) + except: + print("There was an error with the checkout script "+self.src+"/"+self.fname) + raise ################################################################################################### ## Subclass for container checkout class checkoutForContainer(checkout): -## \brief Opens the checkout script with the specified name -## \param self The checkout script object -## \param fname The file name of the checkout script -## \param srcDir The source directory where fname will be run and source will exist -## \param tmpdir The relative path on disk that fname will be created (and copied from into the -## container) - def __init__(self,fname,srcDir,tmpdir): - self.fname = fname - self.src = srcDir - self.tmpdir = tmpdir - os.system("mkdir -p "+self.tmpdir) - os.system("rm -rf "+self.tmpdir+"/*") - self.checkoutScript = open(self.tmpdir+"/"+fname, 'w') - self.checkoutScript.write("#!/bin/sh -fx \n") - self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + """ + Brief: Subclass for container checkout + """ + def __init__(self,fname,srcDir,tmpdir): + """ + Brief: Opens the checkout script with the specified name + Param: + - self : The checkout script object + - fname : The file name of the checkout script + - srcDir : The source directory where fname will be run and source will exist + - tmpdir : The relative path on disk that fname will be created (and copied from into the + container) + """ + self.fname = fname + self.src = srcDir + self.tmpdir = tmpdir + os.system("mkdir -p "+self.tmpdir) + os.system("rm -rf "+self.tmpdir+"/*") + self.checkoutScript = open(self.tmpdir+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -fx \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") -## \brief Removes the self.tmpdir and contents -## \param self The checkout script object - def cleanup (self): - os.system("rm -rf "+self.tmpdir) + def cleanup (self): + """ + Brief: Removes the self.tmpdir and contents + Param: + - self The checkout script object + """ + os.system("rm -rf "+self.tmpdir) diff --git a/fre/make/gfdlfremake/makefilefre.py b/fre/make/gfdlfremake/makefilefre.py index 8cc6b6ea..dfe15fff 100644 --- a/fre/make/gfdlfremake/makefilefre.py +++ b/fre/make/gfdlfremake/makefilefre.py @@ -1,11 +1,14 @@ import os -import subprocess import textwrap -## \brief Writes the link line for bare metal and container builds -## \param self The Makefile object def linklineBuild(self): + """ + Brief: Writes the link line for bare metal and container builds + Param: + - self The Makefile object + """ linkline="" + #if additional libraries 
are defined, populate the link line with the correct information for libraries ## CONTAINER; write a script that will execute in the container, to fill in link line with additional libraries in Makefile if "tmp" in self.filePath: @@ -14,7 +17,7 @@ def linklineBuild(self): for l in self.l: fh.write(l+" ") fh.write("\n") - + self.linklinecreate = ''' line='' for l in $@; do @@ -54,97 +57,119 @@ def linklineBuild(self): os.system(f"sed -i 's|\($(LDFLAGS)\)|$(LL) \\1|' {self.filePath}/Makefile") class makefile(): -## \brief Opens Makefile and sets the experiment and other common variables -## \param self The Makefile object -## \param exp Experiment name -## \param libs Additional libraries/linker flags defined by user -## \param srcDir The path to the source directory -## \param bldDir The path to the build directory -## \param mkTemplatePath The path of the template .mk file for compiling - def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath): - self.e = exp - self.l = libs - self.src = srcDir - self.bld = bldDir - self.template = mkTemplatePath - self.c =[] #components - self.r=[] #requires - self.o=[] #overrides - os.system("mkdir -p "+self.bld) - self.filePath = self.bld # Needed so that the container and bare metal builds can - # use the same function to create the Makefile - -## \brief Adds a component and corresponding requires to the list -## \param self The Makefile object -## \param c The component -## \param r The requires for that componenet -## \param o The overrides for that component - def addComponent (self,c,r,o): - self.c.append(c) - self.r.append(r) - self.o.append(o) -## \brief Sorts the component by how many requires there are for that component -## \param self The Makefile object -## \param c The component -## \param r The requires for that component -## \param o The overrides for that component - def createLibstring (self,c,r,o): - d=zip(self.c,self.r,self.o) - return(sorted(d,key=lambda values:len(values[1]),reverse=True)) - -## \brief Writes the Makefile. 
Should be called after all components are added -## \param self The Makefile object - def writeMakefile (self): -# Get the list of all of the libraries - sd=self.createLibstring(self.c,self.r,self.o) - libstring=" " - for i in sd: - lib=i[0] - libstring = libstring+lib+"/lib"+lib+".a " -# Open the Makefile for Writing - with open(self.filePath+"/Makefile","w") as fh: -# Write the header information for the Makefile - fh.write("# Makefile for "+self.e+"\n") - fh.write("SRCROOT = "+self.src+"/\n") - fh.write("BUILDROOT = "+self.bld+"/\n") - fh.write("MK_TEMPLATE = "+self.template+"\n") - fh.write("include $(MK_TEMPLATE)"+"\n") -# Write the main experiment compile - fh.write(self.e+".x: "+libstring+"\n") - fh.write("\t$(LD) $^ $(LDFLAGS) -o $@ $(STATIC_LIBS)"+"\n") - -# Write the link line script with user-provided libraries - if self.l: - linklineBuild(self) - -# Write the individual component library compiles - with open(self.filePath+"/Makefile","a") as fh: - for (c,r,o) in sd: - libstring = " " - for lib in r: - libstring = libstring+lib+"/lib"+lib+".a " - cstring = c+"/lib"+c+".a: " - fh.write(cstring+libstring+" FORCE"+"\n") - if o == "": - fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) --directory="+c+" $(@F)\n") - else: - fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) "+o+" --directory="+c+" $(@F)\n") - fh.write("FORCE:\n") - fh.write("\n") -# Set up the clean - fh.write("clean:\n") - for c in self.c: - fh.write("\t$(MAKE) --directory="+c+" clean\n") -# Set up localize - fh.write("localize:\n") - for c in self.c: - fh.write("\t$(MAKE) -f $(BUILDROOT)"+c+" localize\n") -# Set up distclean - fh.write("distclean:\n") - for c in self.c: - fh.write("\t$(RM) -r "+c+"\n") - fh.write("\t$(RM) -r "+self.e+"\n") - fh.write("\t$(RM) -r Makefile \n") + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath): + """ + Brief: Opens Makefile and sets the experiment and other common variables + Param: + - self The Makefile object + - exp Experiment name + - libs Additional libraries/linker flags defined by user + - srcDir The path to the source directory + - bldDir The path to the build directory + - mkTemplatePath The path of the template .mk file for compiling + """ + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.bld) + self.filePath = self.bld # Needed so that the container and bare metal builds can + # use the same function to create the Makefile + + def addComponent (self,c,r,o): + """ + Brief: Adds a component and corresponding requires to the list + Param: + - self The Makefile object + - c The component + - r The requires for that componenet + - o The overrides for that component + """ + self.c.append(c) + self.r.append(r) + self.o.append(o) + + def createLibstring (self,c,r,o): + """ + Brief: Sorts the component by how many requires there are for that component + Param: + - self The Makefile object + - c The component + - r The requires for that component + - o The overrides for that component + """ + # org_comp : returns a zip object + org_comp = zip(self.c,self.r,self.o) + # Sort zip object so that the component with the most requires (self.r) is listed first, and so on + sort = sorted(org_comp,key=lambda values:len(values[1]),reverse=True) + + return sort + + def writeMakefile (self): + """ + Brief: Writes the Makefile. 
Should be called after all components are added + Param: + - self The Makefile object + """ + # Get the list of all of the libraries + sd=self.createLibstring(self.c,self.r,self.o) + libstring=" " + for i in sd: + lib=i[0] + libstring = libstring+lib+"/lib"+lib+".a " + + # Open the Makefile for Writing + with open(self.filePath+"/Makefile","w") as fh: + # Write the header information for the Makefile + fh.write("# Makefile for "+self.e+"\n") + fh.write("SRCROOT = "+self.src+"/\n") + fh.write("BUILDROOT = "+self.bld+"/\n") + fh.write("MK_TEMPLATE = "+self.template+"\n") + fh.write("include $(MK_TEMPLATE)"+"\n") + + # Write the main experiment compile + fh.write(self.e+".x: "+libstring+"\n") + fh.write("\t$(LD) $^ $(LDFLAGS) -o $@ $(STATIC_LIBS)"+"\n") + + # Write the link line script with user-provided libraries + if self.l: + linklineBuild(self) + + # Write the individual component library compiles + with open(self.filePath+"/Makefile","a") as fh: + for (c,r,o) in sd: + libstring = " " + for lib in r: + libstring = libstring+lib+"/lib"+lib+".a " + cstring = c+"/lib"+c+".a: " + fh.write(cstring+libstring+" FORCE"+"\n") + if o == "": + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) --directory="+c+" $(@F)\n") + else: + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) "+o+" --directory="+c+" $(@F)\n") + fh.write("FORCE:\n") + fh.write("\n") + + # Set up the clean + fh.write("clean:\n") + for c in self.c: + fh.write("\t$(MAKE) --directory="+c+" clean\n") + + # Set up localize + fh.write("localize:\n") + for c in self.c: + fh.write("\t$(MAKE) -f $(BUILDROOT)"+c+" localize\n") + + # Set up distclean + fh.write("distclean:\n") + for c in self.c: + fh.write("\t$(RM) -r "+c+"\n") + fh.write("\t$(RM) -r "+self.e+"\n") + fh.write("\t$(RM) -r Makefile \n") ### This seems incomplete? ~ ejs ## The makefile class for a container. 
It gets built into a temporary directory so it can be copied @@ -156,21 +181,24 @@ def writeMakefile (self): ## \param mkTemplatePath The path of the template .mk file for compiling ## \param tmpDir A local path to temporarily store files build to be copied to the container class makefileContainer(makefile): - def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath,tmpDir): - self.e = exp - self.l = libs - self.src = srcDir - self.bld = bldDir - self.template = mkTemplatePath - self.tmpDir = tmpDir - self.c =[] #components - self.r=[] #requires - self.o=[] #overrides - os.system("mkdir -p "+self.tmpDir) - self.filePath = self.tmpDir # Needed so that the container and bare metal builds can + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath,tmpDir): + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.tmpDir = tmpDir + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.tmpDir) + self.filePath = self.tmpDir # Needed so that the container and bare metal builds can # use the same function to create the Makefile -## \return the tmpDir -## \param self The makefile object - def getTmpDir(self): - return self.tmpDir + def getTmpDir(self): + """ + Brief: Return the tmpDir + Param: + - self The makefile object + """ + return self.tmpDir diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index df37aa56..fe8924f9 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -1,106 +1,108 @@ import yaml class platforms (): -## \param self The platform yaml object -## \param fname The path to the platform yaml file -## \param v the fre variables defined in the model Yaml - def __init__(self,fname,v): - with open(fname, 'r') as file: - self.yaml = yaml.safe_load(v.freVarSub(file.read())) -## Check the yaml for errors/omissions - try: - self.yaml["platforms"] - except: - print(fname+" must have a platforms key\n") - raise -## Loop through the platforms - for p in self.yaml["platforms"]: -## Check the platform name - try: - p["name"] - except: - print("At least one of the platforms is missing a name in "+fname+"\n") - raise -## Check the compiler - try: - p["compiler"] - except: - print ("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") - raise -## Check for the Fortran (fc) and C (cc) compilers - try: - p["fc"] - except: - print ("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") - raise - try: - p["cc"] - except: - print ("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") - raise -## Check for modules to load - try: - p["modules"] - except: - p["modules"]=[""] -## Check for modulesInit to set up the modules environment - try: - p["modulesInit"] - except: - p["modulesInit"]=[""] -## Get the root for the build - try: - p["modelRoot"] - except: - p["modelRoot"] = "/apps" -## Check if we are working with a container and get the info for that - try: - p["container"] - except: - p["container"] = False - p["RUNenv"] = "" - p["containerBuild"] = "" - p["containerRun"] = "" - if p["container"]: -## Check the container builder - try: + def __init__(self,platforminfo): + """ + Param: + - self The platform yaml object + - platforminfo dictionary with platform information + from the combined yaml + """ + self.yaml = platforminfo + + ## Check the yaml for errors/omissions + ## Loop 
through the platforms
+        for p in self.yaml:
+            ## Check the platform name
+            try:
+                p["name"]
+            except:
+                raise Exception("At least one of the platforms is missing a name in the combined yaml\n")
+            ## Check the compiler
+            try:
+                p["compiler"]
+            except:
+                raise Exception("You must specify a compiler in your "+p["name"]+" platform in the combined yaml\n")
+            ## Check for the Fortran (fc) and C (cc) compilers
+            try:
+                p["fc"]
+            except:
+                raise Exception("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the combined yaml\n")
+            try:
+                p["cc"]
+            except:
+                raise Exception("You must specify the name of the C compiler as cc on the "+p["name"]+" platform in the combined yaml\n")
+            ## Check for modules to load
+            try:
+                p["modules"]
+            except:
+                p["modules"]=[""]
+            ## Check for modulesInit to set up the modules environment
+            try:
+                p["modulesInit"]
+            except:
+                p["modulesInit"]=[""]
+            ## Get the root for the build
+            try:
+                p["modelRoot"]
+            except:
+                p["modelRoot"] = "/apps"
+            ## Check if we are working with a container and get the info for that
+            try:
+                p["container"]
+            except:
+                p["container"] = False
+                p["RUNenv"] = ""
+                p["containerBuild"] = ""
+                p["containerRun"] = ""
+            if p["container"]:
+                ## Check the container builder
+                try:
                     p["containerBuild"]
-      except:
-        print ("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n")
-        raise
-      if p["containerBuild"] != "podman" and p["containerBuild"] != "docker":
+                except:
+                    raise Exception("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the combined yaml\n")
+                if p["containerBuild"] != "podman" and p["containerBuild"] != "docker":
                     raise ValueError("Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n")
-## Check for container environment set up for RUN commands
-      try:
+                ## Check for container environment set up for RUN commands
+                try:
                     p["RUNenv"]
-      except:
+                except:
                     p["RUNenv"] = ""
-## Check the container runner
-      try:
+                ## Check the container runner
+                try:
                     p["containerRun"]
-      except:
-        print ("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n")
-        raise
-      if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity":
+                except:
+                    raise Exception("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the combined yaml\n")
+                if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity":
                     raise ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n")
-## set the location of the mkTemplate. In a container, it uses the hpc-me template cloned from mkmf
-      p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk"
-    else:
-      try:
+                ## set the location of the mkTemplate.
+                ## In a container, it uses the hpc-me template cloned from mkmf
+                p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk"
+            else:
+                try:
                     p["mkTemplate"]
-      except:
+                except:
                     raise ValueError("The non-container platform "+p["name"]+" must specify a mkTemplate \n")
-## \brief Checks if the platform yaml has the named platform
-  def hasPlatform(self,name):
-    for p in self.yaml["platforms"]:
-      if p["name"] == name:
-        return True
-    return False
-## \brief Get the platform yaml
-  def getPlatformsYaml(self):
-    return self.yaml
-## \brief Get the platform information from the name of the platform
-  def getPlatformFromName(self,name):
-    for p in self.yaml["platforms"]:
-      if p["name"] == name:
-        return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"])
+
+    def hasPlatform(self,name):
+        """
+        Brief: Checks if the platform yaml has the named platform
+        """
+        for p in self.yaml:
+            if p["name"] == name:
+                return True
+        return False
+
+    def getPlatformsYaml(self):
+        """
+        Brief: Get the platform yaml
+        """
+        return self.yaml
+
+    def getPlatformFromName(self,name):
+        """
+        Brief: Get the platform information from the name of the platform
+        """
+        for p in self.yaml:
+            if p["name"] == name:
+                return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"])
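The refactored `platforms` class now takes the already-parsed platform list from the combined yaml rather than opening a file itself. A minimal usage sketch, assuming the in-repo import path; the platform entry below is illustrative, not a real site configuration:

```python
# Sketch only: a parsed "platforms" list shaped like the one schema.json validates.
from fre.make.gfdlfremake import platformfre

platforminfo = [
    {"name": "ncrc5.intel23", "compiler": "intel", "fc": "ftn", "cc": "cc",
     "mkTemplate": "/path/to/intel-classic.mk"},   # hypothetical template path
]

plats = platformfre.platforms(platforminfo)
if plats.hasPlatform("ncrc5.intel23"):
    # getPlatformFromName returns the 11-element tuple that fremake_run unpacks;
    # modules, modelRoot, container, etc. were filled with defaults by __init__
    (compiler, modules, modulesInit, fc, cc, modelRoot, container,
     mkTemplate, containerBuild, containerRun, RUNenv) = plats.getPlatformFromName("ncrc5.intel23")
```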
diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json
index 7e460d6f..751bb9db 100644
--- a/fre/make/gfdlfremake/schema.json
+++ b/fre/make/gfdlfremake/schema.json
@@ -1,164 +1,201 @@
 {
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "$id": "https://example.com/product.schema.json",
-  "title": "frecanopy",
-  "description": "A yaml base for use with fre canopy",
-  "type": "object",
-  "properties": {
-    "platformYaml": {
-      "description": "Path to the platform yaml file",
-      "type": "string"
-    },
-    "compileYaml": {
-      "description": "Path to the compile yaml file",
-      "type": "string"
+    "$schema": "http://json-schema.org/draft-06/schema#",
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "name": {
+            "description": "The name of the experiment",
+            "type": "string"
+        },
+        "platform": {
+            "description": "The platforms listed in the command",
+            "type": "string"
+        },
+        "target": {
+            "description": "The targets listed in the command",
+            "type": "string"
+        },
+        "build": {
+            "type": "object",
+            "additionalProperties": false,
+            "properties": {
+                "compileYaml": {
+                    "description": "Path to the compile yaml.",
+                    "type": "string"
+                },
+                "platformYaml": {
+                    "description": "Path to the platform yaml.",
+                    "type": "string"
+                }
+            }
+        },
+        "compile": {
+            "description": "The source code descriptions",
+            "$ref": "#/definitions/Compile"
+        },
+        "platforms": {
+            "description": "FRE platforms",
+            "type": "array",
+            "items": {"$ref": "#/definitions/Platform"}
+        }
+    },
+    "definitions": {
+        "Compile": {
+            "type": "object",
+            "properties": {
+                "experiment": {
+                    "description": "The name of the model",
+                    "type": "string"
+                },
+                "container_addlibs": {
+                    "description": "Libraries and packages needed for linking in the container",
+                    "type": ["array","string","null"]
+                },
+                "baremetal_linkerflags": {
+                    "description": "Linker flags of libraries and packages needed for linking in the bare-metal build",
+                    "type": ["array","string","null"]
+                },
+                "src": {
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/Src"}
+ } + } + }, + "Src": { + "type": "object", + "properties": { + "component": { + "description": "The name of the model component", + "type": "string" + }, + "repo": { + "anyOf": [ + { + "description": "The URL of the code repository", + "type": "array", + "items": { + "type": "string", + "format": "uri", + "qt-uri-protocols": [ + "https" + ], + "qt-uri-extensions": [ + ".git" + ] + } + }, + { + "description": "The URL of the code repository", + "type": "string", + "format": "uri", + "qt-uri-protocols": [ + "https" + ], + "qt-uri-extensions": [ + ".git" + ] + } + ] + }, + "cppdefs": { + "description": "String of CPPDEFs to include in compiling the component", + "type": "string" + }, + "branch": { + "anyOf": [ + { + "description": "The version of code to clone", + "type": "array", + "items": { + "type": "string" + } + }, + { + "description": "The version of code to clone", + "type": "string" + } + ] + }, + "otherFlags": { + "description": "String of Include flags necessary to retrieve other code needed", + "type": "string" + }, + "requires": { + "description": "list of componets that this component depends on", + "type": "array", + "items": {"type": "string"} + }, + "paths": { + "description": "A list of the paths in the component to compile", + "type": "array", + "items": {"type": "string"} + }, + "doF90Cpp": { + "description": "True if the preprocessor needs to be run", + "type": "boolean" }, - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs":{ - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] - }, - "baremetal_linkerflags":{ - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] - }, - "src":{ - "description": "The source code descriptions", - "type": "array", - "items":{"$ref": "#/$defs/comp" } - }, - "platforms":{ - "description": "FRE platforms", - "type": "array", - "items":{"$ref": "#/$defs/plat" } + "makeOverrides": { + "description": "Overrides openmp target for MOM6", + "type": "string" } - }, - "$defs":{ - "comp": { - "type": "object", - "required": ["component", "repo"], - "properties":{ - "component": { - "type": "string", - "description": "The name of the model component" - }, - "repo": { - "anyOf": [ - { - "type": "string", - "description": "The URL of the code repository" - }, - { - "type": ["array","string"], - "description": "The URL of the code repository" - } - ] - }, - "branch": { - "anyOf": [ - { - "type": "string", - "description": "The version of code to clone" - }, - { - "type": ["array","string"], - "description": "The version of code to clone" - } - ] - }, - "requires": { - "type": ["array","string"], - "description": "list of componets that this component depends on" - }, - "cppdefs": { - "type": "string", - "description": "String of CPPDEFs to include in compiling the component" - }, - "paths": { - "type": ["array","string"], - "description": "A list of the paths in the component to compile" - }, - "additionalInstructions": { - "type": "string", - "description": "Additional instuctions to run after checkout" - }, - "doF90Cpp": { - "type": "boolean", - "description": "True if the preprocessor needs to be run" - }, - "makeOverrides": { - "type": "string", - "description": "Overrides openmp target for MOM6" - }, - "otherFlags": { - "type": "string", - "description": "String of Include flags necessary to retrieve other code needed" - } - }, - "additionalProperties": 
false - }, - "plat": { - "type": "object", - "required": ["name", "compiler", "fc", "cc"], - "properties":{ - "name": { - "type": "string", - "description": "The name of the platform" - }, - "compiler": { - "type": "string", - "description": "The compiler used to build the model" - }, - "modulesInit": { - "type": ["array","string"], - "description": "Array of commands to run before loading modules" - }, - "modules": { - "type": ["array","string"], - "description": "List (array) of modules to load" - }, - "fc": { - "type": "string", - "description": "The Fortran compiler" - }, - "cc": { - "type": "string", - "description": "The C compiler" - }, - "mkTemplate": { - "type": "string", - "description": "Path to the mk template file" - }, - "modelRoot": { - "type": "string", - "description": "Path to the root for all model install files" - }, - "container": { - "anyOf": - [ - {"type": "string"}, - {"type": "boolean"} - ] - }, - "RUNenv": { - "type": ["array", "string"], - "description": "Commands needed at the beginning of a RUN in dockerfile" - }, - "containerBuild": { - "type": "string", - "description": "Program used to build the container" - }, - "containerRun": { - "type": "string", - "description": "Program used to run the container" - } - }, - "additionalProperties": false + } + }, + "Platform": { + "type": "object", + "properties": { + "name": { + "description": "The name of the platform", + "type": "string" + }, + "compiler": { + "description": "The compiler used to build the model", + "type": "string" + }, + "modulesInit": { + "description": "Array of commands to run before loading modules", + "type": "array", + "items": {"type": "string"} + }, + "modules": { + "description": "List (array) of modules to load", + "type": "array", + "items": { + "type": "string" + } + }, + "fc": { + "description": "The Fortran compiler", + "type": "string" + }, + "cc": { + "description": "The C compiler", + "type": "string" + }, + "mkTemplate": { + "description": "Path to the mk template file", + "type": "string" + }, + "modelRoot": { + "description": "Path to the root for all model install files", + "type": "string" + }, + "RUNenv": { + "description": "Commands needed at the beginning of a RUN in dockerfile", + "type": ["array","string"] + }, + "container": { + "description": "True/False if using container to compile", + "type": "boolean" + }, + "containerBuild": { + "description": "Program used to build the container", + "type": "string" + }, + "containerRun": { + "description": "Program used to run the container", + "type": "string" } - }, - "required": ["experiment", "src", "platforms"], - "additionalProperties": true + } + } + } } diff --git a/fre/make/gfdlfremake/targetfre.py b/fre/make/gfdlfremake/targetfre.py index b34e381f..0b2b5964 100644 --- a/fre/make/gfdlfremake/targetfre.py +++ b/fre/make/gfdlfremake/targetfre.py @@ -1,65 +1,81 @@ -## Stores information about the target class fretarget: -## Sets up information about the target and handles errors -## \note The default target is prod -## \param self the fretarget object -## \param t The target string - def __init__(self,t): - self.target = t # The target string -## Split the target string - targ = self.target.split('-') - self.makeline_add = "" - self.debug = False - self.repro = False - self.prod = False -## Parse the target string for prod, repro, and debug. 
Set up what to add to the -## make line during compile when using mkmf builds - for target in targ: - if target == "debug": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.debug = True - elif target == "prod": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.prod = True - elif target == "repro": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.repro = True -## Check to see if openmp is included in the target and add that to the makeline add string - if target == "openmp": - targ = target.upper() - self.makeline_add = self.makeline_add + targ + "=on " - self.openmp = True - else: - self.openmp = False -## Check to make sure only one of the prod, debug, repro are used - errormsg = "You can only list one mutually exclusive target, but your target '"+self.target+"' lists more than one of the following targets: \n debug \n prod \n repro" - if self.debug: - try: - if self.repro or self.prod == True: - raise ValueError(errormsg) - except ValueError: - raise - elif self.repro: - try: - if self.prod == True: - raise ValueError(errormsg) - except ValueError: - raise - else: - try: - if self.prod == False: - raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") - except ValueError: - raise -## Returns the name of the target -## \param self The fretarget object - def gettargetName(self): - return self.target -## Returns the makeline_add -## \param self The fretarget object - def getmakeline_add(self): - return self.makeline_add + """ + Class: Stores information about the target + """ + def __init__(self,t): + """ + Brief: Sets up information about the target and handles errors + Note: The default target is prod + Param: + - self the fretarget object + - t The target string + """ + self.target = t # The target string + ## Split the target string + targ = self.target.split('-') + self.makeline_add = "" + self.debug = False + self.repro = False + self.prod = False + + ## Parse the target string for prod, repro, and debug. 
Set up what to add to the + ## make line during compile when using mkmf builds + for target in targ: + if target == "debug": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.debug = True + elif target == "prod": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.prod = True + elif target == "repro": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.repro = True + + ## Check to see if openmp is included in the target and add that to the makeline add string + if target == "openmp": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.openmp = True + else: + self.openmp = False + + ## Check to make sure only one of the prod, debug, repro are used + errormsg = "You can only list one mutually exclusive target, but your target '"+self.target+"' lists more than one of the following targets: \n debug \n prod \n repro" + if self.debug: + try: + if self.repro or self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + elif self.repro: + try: + if self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + else: + try: + if self.prod == False: + raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") + except ValueError: + raise + + def gettargetName(self): + """ + Brief: Returns the name of the target + Param: + - self The fretarget object + """ + return self.target + + def getmakeline_add(self): + """ + Brief: Returns the makeline_add + Param: + - self The fretarget object + """ + return self.makeline_add diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 52adcdcb..6f638bbb 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -1,166 +1,198 @@ -import yaml +import os import json +import yaml from jsonschema import validate, ValidationError, SchemaError from . import platformfre -## Open the yaml file and parse as fremakeYaml -## \param fname the name of the yaml file to parse -## \param v the FRE yaml varaibles (FRE properties) def parseCompile(fname,v): -## Open the yaml file and parse as fremakeYaml - with open(fname, 'r') as file: - # Substitute the variables and read the updated yaml string - y = yaml.safe_load(v.freVarSub(file.read())) - return y -## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will + """ + Brief: Open the yaml file and parse as fremakeYaml + Param: + - fname the name of the yaml file to parse + - v the FRE yaml variables + """ + # Open the yaml file and parse as fremakeYaml + with open(fname, 'r') as yamlfile: + y = yaml.safe_load(v.freVarSub(yamlfile.read())) + + return y + +##### THIS SEEMS UNUSED +## \brief Checks the yaml for variables. Required variables will dump and error. 
Non-required variables will ## set a default value for the variable -## \param var A variable in the yaml -## \param val a default value for var -## \param req if true, the variable is required in the yaml and an exception will be raised -## \param err An error message to print if the variable is required and doesn't exist -def yamlVarCheck(var,val="",req=False,err="error"): - try: - var - except: - if req: - print (err) - raise - else: - var = val -## This will read the compile yaml for FRE and then fill in any of the missing non-required variables +#def yamlVarCheck(var,val="",req=False,err="error"): +# """ +# Brief: Checks the yaml for variables. Required variables will dump and error. +# Non-required variables will set a default value for the variable +# Param: +# - var A variable in the yaml +# - val a default value for var +# - req if true, the variable is required in the yaml and an exception will be raised +# - err An error message to print if the variable is required and doesn't exist +# """ +# try: +# var +# except: +# if req: +# print (err) +# raise +# else: +# var = val + class compileYaml(): -## Read get the compile yaml and fill in the missing pieces -## \param self the compile Yaml object -## \yamlFile The path to the compile yaml file -## \v The FRE variables set in the model yaml - def __init__(self,yamlFile,v): - self.file = yamlFile - self.yaml = parseCompile(self.file,v) - ## Check the yaml for required things - ## Check for required experiment name - try: - self.yaml["experiment"] - except: - print("You must set an experiment name to compile \n") - raise - ## Check for optional libraries and packages for linking in container - try: - self.yaml["container_addlibs"] - except: - self.yaml["container_addlibs"]="" - ## Check for optional libraries and packages for linking on bare-metal system - try: - self.yaml["baremetal_linkerflags"] - except: - self.yaml["baremetal_linkerflags"]="" -# ## Set up the srcDir -# self.src = modelRoot + "/" + self.yaml["experiment"] + "/src" - ## Check for required src - try: - self.yaml["src"] - except: - print("You must set a src to specify the sources in "+self.yaml["experiment"]+"\n") - raise - ## Loop through the src array - for c in self.yaml['src']: - ## Check for required componenet name - try: - c['component'] - except: - print("You must set the 'componet' name for each src component") - raise - ## Check for required repo url - try: - c['repo'] - except: - print("'repo' is missing from the component "+c['component']+" in "+self.yaml["experiment"]+"\n") - raise - # Check for optional branch. Otherwise set it to blank - try: - c['branch'] - except: - c['branch']="" - # Check for optional cppdefs. Otherwise set it to blank - try: - c['cppdefs'] - except: - c['cppdefs']="" - # Check for optional doF90Cpp. Otherwise set it to False - try: - c['doF90Cpp'] - except: - c['doF90Cpp']=False - # Check for optional additional instructions. Otherwise set it to blank - try: - c['additionalInstructions'] - except: - c['additionalInstructions']="" - # Check for optional paths. Otherwise set it to blank - try: - c['paths'] - except: - c['paths']=[c['component']] - # Check for optional requires. Otherwise set it to blank - try: - c['requires'] - except: - c['requires']=[] - # Check for optional overrides. Otherwise set it to blank - try: - c['makeOverrides'] - except: - c['makeOverrides']="" - # Check for optional flags. Otherwise set it to blank. 
-      try:
-        c["otherFlags"]
-      except:
-        c["otherFlags"]=""
-
-## Returns the compile yaml
-  def getCompileYaml(self):
-    try:
-      self.yaml
-    except:
-      print ("You must initialize the compile YAML object before you try to get the yaml \n")
-      raise
-    return self.yaml
-
-#########################################################################################################################
-## \description This will take the models yaml file which has a list of the sub yaml files and combine them into the
-## full freyaml that can be used and checked
-# platformYaml: platforms.yaml
-# layoutYaml:
-# compileYaml: compile.yaml
-# experiments:
+    """
+    Brief: This will read the compile yaml for FRE and then fill in any of the missing non-required variables
+    """
+    def __init__(self,compileinfo):
+        """
+        Brief: Reads the compile yaml and fills in the missing pieces
+        Param:
+            - self the compile Yaml object
+            - compileinfo dictionary with compile information from the combined yaml
+        """
+        # compile information from the combined yaml
+        self.yaml = compileinfo
+
+        ## Check the yaml for required things
+        ## Check for required experiment name
+        try:
+            self.yaml["experiment"]
+        except:
+            print("You must set an experiment name to compile \n")
+            raise
+        ## Check for optional libraries and packages for linking in container
+        try:
+            self.yaml["container_addlibs"]
+        except:
+            self.yaml["container_addlibs"]=""
+        ## Check for optional libraries and packages for linking on bare-metal system
+        try:
+            self.yaml["baremetal_linkerflags"]
+        except:
+            self.yaml["baremetal_linkerflags"]=""
+        ## Check for required src
+        try:
+            self.yaml["src"]
+        except:
+            print("You must set a src to specify the sources in modelRoot/"+self.yaml["experiment"]+"\n")
+            raise
+        ## Loop through the src array
+        for c in self.yaml['src']:
+            ## Check for required component name
+            try:
+                c['component']
+            except:
+                print("You must set the 'component' name for each src component")
+                raise
+            ## Check for required repo url
+            try:
+                c['repo']
+            except:
+                print("'repo' is missing from the component "+c['component']+" in "+self.yaml["experiment"]+"\n")
+                raise
+            # Check for optional branch. Otherwise set it to blank
+            try:
+                c['branch']
+            except:
+                c['branch']=""
+            # Check for optional cppdefs. Otherwise set it to blank
+            try:
+                c['cppdefs']
+            except:
+                c['cppdefs']=""
+            # Check for optional doF90Cpp. Otherwise set it to False
+            try:
+                c['doF90Cpp']
+            except:
+                c['doF90Cpp']=False
+            # Check for optional additional instructions. Otherwise set it to blank
+            try:
+                c['additionalInstructions']
+            except:
+                c['additionalInstructions']=""
+            # Check for optional paths. Otherwise set it to blank
+            try:
+                c['paths']
+            except:
+                c['paths']=[c['component']]
+            # Check for optional requires. Otherwise set it to blank
+            try:
+                c['requires']
+            except:
+                c['requires']=[]
+            # Check for optional overrides. Otherwise set it to blank
+            try:
+                c['makeOverrides']
+            except:
+                c['makeOverrides']=""
+            # Check for optional flags. Otherwise set it to blank.
+            try:
+                c["otherFlags"]
+            except:
+                c["otherFlags"]=""
+
+    def getCompileYaml(self):
+        """
+        Brief: Returns the compile yaml
+        """
+        try:
+            self.yaml
+        except:
+            print ("You must initialize the compile YAML object before you try to get the yaml \n")
+            raise
+        return self.yaml
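Because `compileYaml` now receives a dictionary instead of a file path, its default-filling can be exercised directly. A minimal sketch, assuming the in-repo import path; the two-key `compileinfo` is illustrative:

```python
# Sketch only: shows the optional-key defaults compileYaml fills in.
from fre.make.gfdlfremake.yamlfre import compileYaml

compileinfo = {
    "experiment": "am5",
    "src": [{"component": "FMS", "repo": "https://github.com/NOAA-GFDL/FMS.git"}],
}

comp = compileYaml(compileinfo).getCompileYaml()["src"][0]
print(comp["branch"])  # "" -- missing optional keys default to blank
print(comp["paths"])   # ["FMS"] -- paths defaults to the component name
```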
 class freyaml():
-## \param self The freyaml object
-## \param modelFileName The name of the model yaml file
-## \param v the FRE yaml varaibles object (FRE properties)
-  def __init__(self,modelFileName,v):
-    self.freyaml = {}
-    self.modelfile = modelFileName
-    with open(self.modelfile, 'r') as file:
-      self.modelyaml = yaml.safe_load(v.freVarSub(file.read()))
-    self.freyaml.update(self.modelyaml)
-    self.compilefile = self.modelyaml["compileYaml"]
-    self.compile = compileYaml(self.compilefile,v)
-    self.compileyaml = self.compile.getCompileYaml()
-    self.freyaml.update(self.compileyaml)
-    self.platformsfile = self.modelyaml["platformYaml"]
-    self.platforms = platformfre.platforms(self.platformsfile,v)
-    self.platformsyaml = self.platforms.getPlatformsYaml()
-    self.freyaml.update(self.platformsyaml)
-## Validate the YAML
-    fremake_package_dir = os.path.dirname(os.path.abspath(__file__))
-    schema_path = os.path.join(fremake_package_dir, 'schema.json')
-    with open(schema_path, 'r') as f:
-      s = f.read()
-    schema = json.loads(s)
-    validate(instance=self.freyaml, schema=schema)
-## \return platform yaml dictionary
-  def getPlatformsYaml(self):
-    return self.platformsyaml
-## \return compile yaml dictionary
-  def getCompileYaml(self):
-    return self.compileyaml
+    """
+    Brief: This will take the combined yaml file, parse information, and fill in missing variables
+           to make the full freyaml that can be used and checked
+    Note:
+        - platformYaml: platforms.yaml
+        - compileYaml: compile.yaml
+    """
+    def __init__(self,combinedyaml,v):
+        """
+        Param:
+            - self The freyaml object
+            - combinedyaml The name of the combined yaml file
+            - v FRE yaml variables
+        """
+        self.combinedfile = combinedyaml
+
+        self.freyaml = parseCompile(self.combinedfile, v)
+
+        #get compile info
+        self.compiledict = self.freyaml.get("compile")
+        self.compile = compileYaml(self.compiledict)
+        self.compileyaml = self.compile.getCompileYaml()
+
+        #self.freyaml.update(self.compileyaml)
+
+        #get platform info
+        self.platformsdict = self.freyaml.get("platforms")
+        self.platforms = platformfre.platforms(self.platformsdict)
+        self.platformsyaml = self.platforms.getPlatformsYaml()
+
+        #self.freyaml.update(self.platformsyaml)
+
+        ## VALIDATION OF COMBINED YAML FOR COMPILATION
+        fremake_package_dir = os.path.dirname(os.path.abspath(__file__))
+        schema_path = os.path.join(fremake_package_dir, 'schema.json')
+        with open(schema_path, 'r') as f:
+            s = f.read()
+        schema = json.loads(s)
+
+        validate(instance=self.freyaml,schema=schema)
+        print("\nCOMBINED YAML VALID")
+
+    def getCompileYaml(self):
+        """
+        Brief: Returns the compile yaml
+        """
+        return self.compileyaml
+
+    def getPlatformsYaml(self):
+        """
+        Brief: Returns the platforms yaml
+        """
+        return self.platformsyaml
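With these changes, `fremake_run` (next hunk) follows a combine, parse, validate sequence. A condensed sketch of that flow, assuming a model yaml named `am5.yaml` whose platform and target entries exist:

```python
# Sketch only: mirrors the calls fremake_run makes after this change.
from pathlib import Path
import fre.yamltools.combine_yamls as cy
from fre.make.gfdlfremake import varsfre, yamlfre

yml = "am5.yaml"
platform, target = ["ncrc5.intel23"], ["prod"]

# Reuse an existing combined yaml, or combine model/compile/platform yamls now
combined = Path(f"combined-{yml.split('.')[0]}.yaml")
full_combined = cy.combined_compile_existcheck(combined, yml, platform, target)

# Parsing the combined yaml also validates it against schema.json
freVars = varsfre.frevars(full_combined)
modelYaml = yamlfre.freyaml(full_combined, freVars)
fremakeYaml = modelYaml.getCompileYaml()
```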
diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py
index e45d6837..006f195c 100644
--- a/fre/make/runFremake.py
+++ b/fre/make/runFremake.py
@@ -8,18 +8,15 @@
 import os
 import logging
 from multiprocessing.dummy import Pool
-
 import click
-
+from pathlib import Path
 from .gfdlfremake import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal
-
+import fre.yamltools.combine_yamls as cy
 
 @click.command()
-def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose):
-
+def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose):
     yml = yamlfile
-    ps = platform
-    ts = target
+    name = yamlfile.split(".")[0]
     nparallel = parallel
     jobs = str(jobs)
     pcheck = no_parallel_checkout
@@ -43,11 +40,18 @@ def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout
     plist = platform
     tlist = target
 
+    # Combined compile yaml file
+    combined = Path(f"combined-{name}.yaml")
+
+    ## If the combined yaml already exists, note its existence
+    ## If it does not exist, combine the model, compile, and platform yamls
+    full_combined = cy.combined_compile_existcheck(combined,yml,platform,target)
+
     ## Get the variables in the model yaml
-    freVars = varsfre.frevars(yml)
+    freVars = varsfre.frevars(full_combined)
 
     ## Open the yaml file and parse as fremakeYaml
-    modelYaml = yamlfre.freyaml(yml,freVars)
+    modelYaml = yamlfre.freyaml(full_combined,freVars)
     fremakeYaml = modelYaml.getCompileYaml()
 
     ## Error checking the targets
@@ -62,8 +66,9 @@ def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout
     if modelYaml.platforms.hasPlatform(platformName):
         pass
     else:
-        raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile)
-    (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName)
+        raise ValueError (platformName + " does not exist in " + str(full_combined))
+
+    (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName)
 
     ## Create the checkout script
     if iscontainer == False:
@@ -87,7 +92,7 @@ def fremake_run(yamlfile, platform, target, parallel, jobs, no_parallel_checkout
     if modelYaml.platforms.hasPlatform(platformName):
         pass
     else:
-        raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile)
+        raise ValueError (platformName + " does not exist in " + str(full_combined))
     (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName)
 
     ## Make the source directory based on the modelRoot and platform
diff --git a/fre/make/tests/AM5_example/am5.yaml b/fre/make/tests/AM5_example/am5.yaml
index 4b7bf8d4..359755ec 100644
--- a/fre/make/tests/AM5_example/am5.yaml
+++ b/fre/make/tests/AM5_example/am5.yaml
@@ -1,6 +1,104 @@
-platformYaml: platforms.yaml
-compileYaml: compile.yaml
-release: f1a1r1
-INTEL: "intel-classic"
-FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include"
-momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include"
+# reusable variables
+fre_properties:
+  - &AM5_VERSION "am5f7b12r1"
+  - &FRE_STEM !join [am5/, *AM5_VERSION]
+
+  # amip
+  - &EXP_AMIP_START "1979"
+  - &EXP_AMIP_END "2020"
+  - &ANA_AMIP_START "1980"
+  - &ANA_AMIP_END "2020"
+
+  - &PP_AMIP_CHUNK96 "1yr"
+  - &PP_AMIP_CHUNK384 "1yr"
+  - &PP_XYINTERP96 "180,288"
+  - &PP_XYINTERP384 "720,1152"
+
+  # climo
+  - &EXP_CLIMO_START96 "0001"
+  - &EXP_CLIMO_END96 "0011"
+  - &ANA_CLIMO_START96 "0002"
+  - &ANA_CLIMO_END96 "0011"
+
+  - &EXP_CLIMO_START384 "0001"
+  - &EXP_CLIMO_END384 "0006"
+  - &ANA_CLIMO_START384 "0002"
+  - &ANA_CLIMO_END384 "0006"
+
+  # coupled
+  - &PP_CPLD_CHUNK_A "5yr"
+  - &PP_CPLD_CHUNK_B "20yr"
+
+  # grids
+  - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar"
+ + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "compile.yaml" + platformYaml: "yaml_include/platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - "yaml_include/pp.c96_amip.yaml" + - name: "c96L65_am5f7b12r1_pdclim1850F" + pp: + - "yaml_include/pp.c96_clim.yaml" + - name: "c96L65_am5f7b12r1_pdclim2010F" + pp: + - "yaml_include/pp.c96_clim.yaml" + - name: "c96L65_am5f7b12r1_pdclim2010AERF" + pp: + - "yaml_include/pp.c96_clim.yaml" + - name: "c384L65_am5f7b12r1_amip" + pp: + - "yaml_include/pp.c384_amip.yaml" + - name: "c384L65_am5f7b12r1_pdclim2010F" + pp: + - "yaml_include/pp.c384_clim.yaml" + - name: "c384L65_am5f7b12r1_pdclim1850F" + pp: + - "yaml_include/pp.c384_clim.yaml" + - name: "c384L65_am5f7b12r1_pdclim2010AERF" + pp: + - "yaml_include/pp.c384_clim.yaml" + - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" + pp: + - "yaml_include/pp.c384_amip.yaml" + - "yaml_include/pp.om4.yaml" + - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" + pp: + - "yaml_include/pp.c96_amip.yaml" + - "yaml_include/pp.om4.yaml" + - name: "c96L65_am5f7b12r1_amip_cosp" + pp: + - "yaml_include/pp.c96_amip.yaml" diff --git a/fre/make/tests/AM5_example/compile.yaml b/fre/make/tests/AM5_example/compile.yaml index 5200599c..5f9a361b 100644 --- a/fre/make/tests/AM5_example/compile.yaml +++ b/fre/make/tests/AM5_example/compile.yaml @@ -1,66 +1,67 @@ -experiment: "am5" -container_addlibs: -baremetal_linkerflags: -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" - branch: "2022.01" - cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" - otherFlags: "$(FMSincludes)" - - component: "am5_phys" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git" - branch: "2022.01" - otherFlags: "$(FMSincludes)" - - component: "GFDL_atmos_cubed_sphere" - requires: ["FMS", "am5_phys"] - repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" - branch: "2022.01" - paths: ["GFDL_atmos_cubed_sphere/driver/GFDL", - "GFDL_atmos_cubed_sphere/model", - "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90", - "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90", - "GFDL_atmos_cubed_sphere/tools", - "GFDL_atmos_cubed_sphere/GFDL_tools"] - otherFlags: "$(FMSincludes)" - - component: "atmos_drivers" - requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"] - repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE" - branch: "2022.01" - paths: ["atmos_drivers/coupled"] - 
otherFlags: "$(FMSincludes)"
-  - component: "ice_sis"
-    requires: ["FMS", "ice_param", "mom6"]
-    repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git"
-    branch: "2021.02"
-    otherFlags: "$(FMSincludes) $(momIncludes)"
-  - component: "ice_param"
-    repo: "https://github.com/NOAA-GFDL/ice_param.git"
-    cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF"
-    branch: "2021.02"
-    requires: ["FMS", "mom6"]
-    otherFlags: "$(FMSincludes) $(momIncludes)"
-  - component: "land_lad2"
-    requires: ["FMS"]
-    repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git"
-    branch: "2022.01"
-    branch: "land_lad2_2021.02"
-    doF90Cpp: True
-    cppdefs: "-DINTERNAL_FILE_NML"
-    otherFlags: "$(FMSincludes)"
-  - component: "mom6"
-    requires: ["FMS"]
-    paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"]
-    branch: ["2021.02","dev/gfdl/2018.04.06"]
-    repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"]
-    makeOverrides: 'OPENMP=""'
-    otherFlags: "$(FMSincludes) $(momIncludes)"
-  - component: "FMScoupler"
-    paths: ["FMScoupler/full", "FMScoupler/shared"]
-    repo: "https://github.com/NOAA-GFDL/FMScoupler.git"
-    branch: "2022.01"
-    requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"]
-    otherFlags: "$(FMSincludes) $(momIncludes)"
+compile:
+  experiment: "am5"
+  container_addlibs:
+  baremetal_linkerflags:
+  src:
+    - component: "FMS"
+      repo: "https://github.com/NOAA-GFDL/FMS.git"
+      branch: "2022.01"
+      cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF"
+      otherFlags: *FMSincludes
+    - component: "am5_phys"
+      requires: ["FMS"]
+      repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git"
+      branch: "2022.01"
+      otherFlags: *FMSincludes
+    - component: "GFDL_atmos_cubed_sphere"
+      requires: ["FMS", "am5_phys"]
+      repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git"
+      cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML"
+      branch: "2022.01"
+      paths: ["GFDL_atmos_cubed_sphere/driver/GFDL",
+              "GFDL_atmos_cubed_sphere/model",
+              "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90",
+              "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90",
+              "GFDL_atmos_cubed_sphere/tools",
+              "GFDL_atmos_cubed_sphere/GFDL_tools"]
+      otherFlags: *FMSincludes
+    - component: "atmos_drivers"
+      requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"]
+      repo: "https://github.com/NOAA-GFDL/atmos_drivers.git"
+      cppdefs: "-DSPMD -DCLIMATE_NUDGE"
+      branch: "2022.01"
+      paths: ["atmos_drivers/coupled"]
+      otherFlags: *FMSincludes
+    - component: "ice_sis"
+      requires: ["FMS", "ice_param", "mom6"]
+      repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git"
+      branch: "2021.02"
+      otherFlags: !join [*FMSincludes, " ", *momIncludes]
+    - component: "ice_param"
+      repo: "https://github.com/NOAA-GFDL/ice_param.git"
+      cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF"
+      branch: "2021.02"
+      requires: ["FMS", "mom6"]
+      otherFlags: !join [*FMSincludes, " ", *momIncludes]
+    - component: "land_lad2"
+      requires: ["FMS"]
+      repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git"
+      branch: "land_lad2_2021.02"
+      doF90Cpp: True
+      cppdefs: "-DINTERNAL_FILE_NML"
+      otherFlags: *FMSincludes
+    - component: "mom6"
+      requires: ["FMS"]
+      paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"]
+      branch: ["2021.02","dev/gfdl/2018.04.06"]
+      repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"]
+      makeOverrides: 'OPENMP=""'
+      otherFlags: !join [*FMSincludes, " ", *momIncludes]
+    - component: "FMScoupler"
+      paths: ["FMScoupler/full", "FMScoupler/shared"]
+      repo: "https://github.com/NOAA-GFDL/FMScoupler.git"
+      branch: "2022.01"
+      requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"]
+      otherFlags: !join [*FMSincludes, " ", *momIncludes]
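The rewritten yamls splice anchored strings together with a `!join` tag, which plain PyYAML does not define. A rough sketch of the kind of constructor that makes `!join` work; fre-cli's actual implementation lives in its yaml tooling and may differ:

```python
# Sketch only: an illustrative !join constructor for PyYAML's SafeLoader.
import yaml

def join_constructor(loader, node):
    # Concatenate the items of the tagged sequence into a single string
    return "".join(str(s) for s in loader.construct_sequence(node))

yaml.SafeLoader.add_constructor("!join", join_constructor)

doc = """
fre_properties:
  - &AM5_VERSION "am5f7b12r1"
  - &FRE_STEM !join [am5/, *AM5_VERSION]
"""
print(yaml.safe_load(doc)["fre_properties"][1])  # am5/am5f7b12r1
```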
"mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"] + branch: ["2021.02","dev/gfdl/2018.04.06"] + repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"] + makeOverrides: 'OPENMP=""' + otherFlags: !join [*FMSincludes, " ", *momIncludes] + - component: "FMScoupler" + paths: ["FMScoupler/full", "FMScoupler/shared"] + repo: "https://github.com/NOAA-GFDL/FMScoupler.git" + branch: "2022.01" + requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"] + otherFlags: !join [*FMSincludes, " ", *momIncludes] diff --git a/fre/make/tests/AM5_example/yaml_include/platforms.yaml b/fre/make/tests/AM5_example/yaml_include/platforms.yaml new file mode 100644 index 00000000..60d1aad2 --- /dev/null +++ b/fre/make/tests/AM5_example/yaml_include/platforms.yaml @@ -0,0 +1,26 @@ +platforms: + - name: ncrc5.intel + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] + modelRoot: ${HOME}/fremake_canopy/test + - name: ncrc5.intel23 + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] + modelRoot: ${HOME}/fremake_canopy/test + - name: hpcme.2023 + compiler: intel + RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] + modelRoot: /apps + fc: mpiifort + cc: mpiicc + container: True + containerBuild: "podman" + containerRun: "apptainer" diff --git a/fre/yamltools/tests/AM5_example/yaml_include/pp.c96_amip.yaml b/fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml similarity index 100% rename from fre/yamltools/tests/AM5_example/yaml_include/pp.c96_amip.yaml rename to fre/make/tests/AM5_example/yaml_include/pp.c96_amip.yaml diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index a664cef1..7635e3bd 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -10,27 +10,18 @@ import os import json +import sys import shutil import click +from pathlib import Path from jsonschema import validate import yaml import metomi.rose.config -######VALIDATE##### -package_dir = os.path.dirname(os.path.abspath(__file__)) -schema_path = os.path.join(package_dir, 'schema.json') -def validate_yaml(file): - """ - Using the schema.json file, the yaml format is validated. 
- """ - # Load the json schema: .load() (vs .loads()) reads and parses the json in one - with open(schema_path) as s: - schema = json.load(s) - - # Validate yaml - # If the yaml is not valid, the schema validation will raise errors and exit - if validate(instance=file,schema=schema) is None: - print("YAML VALID") +# Relative import +f = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(f) +import yamltools.combine_yamls as cy #################### def yaml_load(yamlfile): @@ -43,6 +34,25 @@ def yaml_load(yamlfile): return y +######VALIDATE##### +def validate_yaml(yamlfile): + """ + Using the schema.json file, the yaml format is validated. + """ + # Load the yaml + yml = yaml_load(yamlfile) + + package_dir = os.path.dirname(os.path.abspath(__file__)) + schema_path = os.path.join(package_dir, 'schema.json') + # Load the json schema: .load() (vs .loads()) reads and parses the json in one + with open(schema_path,'r') as s: + schema = json.load(s) + + # Validate yaml + # If the yaml is not valid, the schema validation will raise errors and exit + if validate(instance=yml,schema=schema) is None: + print("COMBINED YAML VALID \n") + #################### def rose_init(experiment,platform,target): """ @@ -152,8 +162,15 @@ def _yamlInfo(yamlfile,experiment,platform,target): # Initialize the rose configurations rose_suite,rose_regrid,rose_remap = rose_init(e,p,t) + # Combine model, experiment, and analysis yamls + comb = cy.init_pp_yaml(yml,e,p,t) + full_combined = cy.get_combined_ppyaml(comb) + + # Validate yaml + validate_yaml(full_combined) + # Load the combined yaml - comb_pp_yaml = yaml_load(yml) + comb_pp_yaml = yaml_load(full_combined) ## PARSE COMBINED YAML TO CREATE CONFIGS # Set rose-suite items @@ -166,7 +183,7 @@ def _yamlInfo(yamlfile,experiment,platform,target): print("Writing output files...") cylc_dir = os.path.join(os.path.expanduser("~/cylc-src"), f"{e}__{p}__{t}") outfile = os.path.join(cylc_dir, f"{e}.yaml") - shutil.copyfile(yml, outfile) + shutil.copyfile(full_combined, outfile) print(" " + outfile) dumper = metomi.rose.config.ConfigDumper() diff --git a/fre/pp/schema.json b/fre/pp/schema.json index e69b4b3d..dfb9cff5 100644 --- a/fre/pp/schema.json +++ b/fre/pp/schema.json @@ -7,57 +7,69 @@ "platform": {"type": "string"}, "target": {"type": "string"}, "directories": { - "history_dir": {"type":"string"}, - "pp_dir": {"type":"string"}, - "ptmp_dir": {"type":"string"}, - "refinediag_scripts":{"type":["string","null"]}, - "preanalysis_script":{"type":["string","null"]}, - "history_refined":{"type":["string","null"]}, - "analysis":{"type":["string","null"]}, - "pp_grid_spec": {"type":"string"}, - "fre_analysis_home": {"type":["string","null"]} + "description": "FRE shared directories", + "type": "object", + "items":{"$ref": "#/$defs/dirs" } }, "postprocess": { + "description": "FRE post-processing information", "type": "object", - "properties": { - "settings": { - "type:": "object", - "properties": { - "history_segment": {"type":"string"}, - "site": {"type":"string"}, - "pp_chunk_a": {"type":"string"}, - "pp_chunk_b": {"type":"string"}, - "pp_start": {"type":"string"}, - "pp_stop": {"type":"string"}, - "pp_components": {"type":"string"} - } - }, - "switches": { - "type": "object", - "properties": { - "clean_work": {"type":"boolean"}, - "do_mdtf": {"type":"boolean"}, - "do_statics": {"type":"boolean"}, - "do_timeavgs": {"type":"boolean"}, - "do_refinediag": {"type":"boolean"}, - "do_atmos_plevel_masking": {"type":"boolean"}, - "do_preanalysis": 
{"type":"boolean"}, - "do_analysis": {"type":"boolean"}, - "do_analysis_only": {"type":"boolean"} - } - }, - "components": { - "type": "array", - "properties": { - "type": {"type":"string"}, - "sources": {"type":"string"}, - "sourceGrid": {"type":"string"}, - "xyInterp": {"type":"string"}, - "interpMethod": {"type":"string"}, - "inputRealm": {"type":"string"} - } - } - } + "items":{"$ref": "#/$defs/pp" } } + }, + "$defs": { + "dirs": { + "history_dir": {"type":"string"}, + "pp_dir": {"type":"string"}, + "ptmp_dir": {"type":"string"}, + "refinediag_scripts":{"type":["string","null"]}, + "preanalysis_script":{"type":["string","null"]}, + "history_refined":{"type":["string","null"]}, + "analysis_dir":{"type":["string","null"]}, + "pp_grid_spec": {"type":"string"}, + "fre_analysis_home": {"type":["string","null"]} + }, + "pp": { + "type": "object", + "properties": { + "settings": { + "type:": "object", + "properties": { + "history_segment": {"type":"string"}, + "site": {"type":"string"}, + "pp_chunk_a": {"type":"string"}, + "pp_chunk_b": {"type":"string"}, + "pp_start": {"type":"string"}, + "pp_stop": {"type":"string"}, + "pp_components": {"type":"string"} + } + }, + "switches": { + "type": "object", + "properties": { + "clean_work": {"type":"boolean"}, + "do_mdtf": {"type":"boolean"}, + "do_statics": {"type":"boolean"}, + "do_timeavgs": {"type":"boolean"}, + "do_refinediag": {"type":"boolean"}, + "do_atmos_plevel_masking": {"type":"boolean"}, + "do_preanalysis": {"type":"boolean"}, + "do_analysis": {"type":"boolean"}, + "do_analysis_only": {"type":"boolean"} + } + }, + "components": { + "type": "array", + "properties": { + "type": {"type":"string"}, + "sources": {"type":"string"}, + "sourceGrid": {"type":"string"}, + "xyInterp": {"type":"string"}, + "interpMethod": {"type":"string"}, + "inputRealm": {"type":"string"} + } + } + } + } } } diff --git a/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml b/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml deleted file mode 100644 index ec011cb2..00000000 --- a/fre/pp/tests/AM5_example/combined-c96L65_am5f7b12r1_amip.yaml +++ /dev/null @@ -1,230 +0,0 @@ -name: c96L65_am5f7b12r1_amip -platform: gfdl.ncrc5-intel22-classic -target: prod-openmp -compile: - experiment: am5 - container_addlibs: null - baremetal_linkerflags: null - src: - - component: FMS - repo: https://github.com/NOAA-GFDL/FMS.git - cppdefs: -DHAVE_GETTID -Duse_libMPI -Duse_netCDF - branch: '2022.01' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: am5_phys - requires: - - FMS - repo: https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git - branch: '2022.01' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: GFDL_atmos_cubed_sphere - requires: - - FMS - - am5_phys - repo: https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git - cppdefs: -DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML - branch: '2022.01' - paths: - - GFDL_atmos_cubed_sphere/driver/GFDL - - GFDL_atmos_cubed_sphere/model - - GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90 - - GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90 - - GFDL_atmos_cubed_sphere/tools - - GFDL_atmos_cubed_sphere/GFDL_tools - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: atmos_drivers - requires: - - FMS - - am5_phys - - GFDL_atmos_cubed_sphere - repo: https://github.com/NOAA-GFDL/atmos_drivers.git - cppdefs: -DSPMD -DCLIMATE_NUDGE - branch: '2022.01' - paths: - - atmos_drivers/coupled - otherFlags: 
-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: ice_sis - requires: - - FMS - - ice_param - - mom6 - repo: https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git - branch: '2021.02' - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: ice_param - repo: https://github.com/NOAA-GFDL/ice_param.git - cppdefs: -Duse_yaml -Duse_libMPI -Duse_netCDF - branch: '2021.02' - requires: - - FMS - - mom6 - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: land_lad2 - requires: - - FMS - repo: https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git - branch: land_lad2_2021.02 - doF90Cpp: true - cppdefs: -DINTERNAL_FILE_NML - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include - - component: mom6 - requires: - - FMS - paths: - - mom6/MOM6-examples/src/MOM6/config_src/dynamic - - mom6/MOM6-examples/src/MOM6/config_src/coupled_driver - - mom6/MOM6-examples/src/MOM6/src/*/ - - mom6/MOM6-examples/src/MOM6/src/*/*/ - - mom6/ocean_BGC/generic_tracers - - mom6/ocean_BGC/mocsy/src - branch: - - '2021.02' - - dev/gfdl/2018.04.06 - repo: - - https://github.com/NOAA-GFDL/ocean_BGC.git - - https://github.com/NOAA-GFDL/MOM6-examples.git - makeOverrides: OPENMP="" - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include - - component: FMScoupler - paths: - - FMScoupler/full - - FMScoupler/shared - repo: https://github.com/NOAA-GFDL/FMScoupler.git - branch: '2022.01' - requires: - - FMS - - atmos_drivers - - am5_phys - - land_lad2 - - ice_sis - - ice_param - - mom6 - otherFlags: -IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include -Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include -platforms: -- name: ncrc5.intel - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2022.2.1 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: ${HOME}/fremake_canopy/test -- name: ncrc5.intel23 - compiler: intel - modulesInit: - - " module use -a /ncrc/home2/fms/local/modulefiles \n" - - "source $MODULESHOME/init/sh \n" - modules: - - intel-classic/2023.1.0 - - fre/bronx-20 - - cray-hdf5/1.12.2.3 - - cray-netcdf/4.9.0.3 - fc: ftn - cc: cc - mkTemplate: /ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk - modelRoot: /ncrc/home1/Dana.Singh/fre/fre-cli/fre/make/tests/AM5_example/combine_yamls/test -- name: hpcme.2023 - compiler: intel - RUNenv: - - . 
/spack/share/spack/setup-env.sh - - spack load libyaml - - spack load netcdf-fortran@4.5.4 - - spack load hdf5@1.14.0 - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: true - containerBuild: podman - containerRun: apptainer -directories: - history_dir: /archive/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip/gfdl.ncrc5-intel22-classic-prod-openmp/history - pp_dir: /archive/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip/gfdl.ncrc5-intel22-classic-prod-openmp/pp - analysis_dir: /nbhome/$USER/am5/am5f7b12r1/c96L65_am5f7b12r1_amip - ptmp_dir: /ptmp/$USER - fre_analysis_home: /home/fms/local/opt/fre-analysis/test - pp_grid_spec: /archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar -postprocess: - settings: - history_segment: P1Y - site: ppan - pp_start: 19800101T0000Z - pp_stop: 20200101T0000Z - pp_chunk_a: P1Y - pp_components: atmos atmos_scalar - switches: - do_statics: false - do_timeavgs: true - clean_work: true - do_refinediag: false - do_atmos_plevel_masking: true - do_preanalysis: false - do_analysis: true - components: - - type: atmos_cmip - sources: atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip - sourceGrid: cubedsphere - xyInterp: 180,360 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos - sources: atmos_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_level_cmip - sources: atmos_level_cmip - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_level - sources: atmos_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_month_aer - sources: atmos_month_aer - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: atmos - - type: atmos_diurnal - sources: atmos_diurnal - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order2 - inputRealm: atmos - - type: atmos_scalar - sources: atmos_scalar - - type: aerosol_cmip - xyInterp: 180,288 - sources: aerosol_month_cmip - sourceGrid: cubedsphere - interpMethod: conserve_order1 - inputRealm: atmos - - type: land - sources: land_month - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: land - - type: land_cmip - sources: land_month_cmip - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: land - - type: tracer_level - sources: atmos_tracer - sourceGrid: cubedsphere - xyInterp: 180,288 - interpMethod: conserve_order1 - inputRealm: atmos diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 70a1e74f..1f61efd4 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -10,7 +10,7 @@ # Set example yaml paths, input directory CWD = Path.cwd() test_dir = Path("fre/pp/tests") -test_yaml = Path(f"AM5_example/combined-{experiment}.yaml") +test_yaml = Path(f"AM5_example/am5.yaml") # Set home for ~/cylc-src location in script os.environ["HOME"]=str(Path(f"{CWD}/{test_dir}/configure_yaml_out")) @@ -27,18 +27,23 @@ def test_configure_script(): Creates rose-suite, regrid rose-app, remap rose-app TO-DO: will break this up for better tests """ + os.chdir(f"{CWD}/{test_dir}/AM5_example") + # Set output directory out_dir = Path(f"{os.getenv('HOME')}/cylc-src/{experiment}__{platform}__{target}") Path(out_dir).mkdir(parents=True,exist_ok=True) # Define combined yaml - comb_yaml 
= str(Path(f"{CWD}/{test_dir}/{test_yaml}")) + model_yaml = str(Path(f"{CWD}/{test_dir}/{test_yaml}")) # Invoke configure_yaml_script.py - csy._yamlInfo(comb_yaml,experiment,platform,target) + csy._yamlInfo(model_yaml,experiment,platform,target) # Check for configuration creation and final combined yaml assert all([Path(f"{out_dir}/{experiment}.yaml").exists(), Path(f"{out_dir}/rose-suite.conf").exists(), Path(f"{out_dir}/app/regrid-xy/rose-app.conf").exists(), Path(f"{out_dir}/app/remap-pp-components/rose-app.conf").exists()]) + + # Go back to original directory + os.chdir(CWD) diff --git a/fre/tests/test_fre_catalog_cli.py b/fre/tests/test_fre_catalog_cli.py index 831003bf..e6464007 100644 --- a/fre/tests/test_fre_catalog_cli.py +++ b/fre/tests/test_fre_catalog_cli.py @@ -26,7 +26,7 @@ def test_cli_fre_catalog_builder(): result = runner.invoke(fre.fre, args=["catalog", "builder"]) assert all( [ result.exit_code == 1, - 'No paths given, using yaml configuration' + 'Missing: input_path or output_path. Pass it in the config yaml or as command-line option' in result.stdout.split('\n') ] ) diff --git a/fre/yamltools/README.md b/fre/yamltools/README.md index fd0f8f55..f6d896f3 100644 --- a/fre/yamltools/README.md +++ b/fre/yamltools/README.md @@ -1,6 +1,17 @@ ## FRE yamltools -### Tools: -- `combine-yaml.py`: creates a `combined-[experiment name].yaml` file in which the [model].yaml, compile.yaml, platforms.yaml, [experiment].yaml, and [analysisscript].yaml are merged +`fre yamltools` provides subtools that help to manage and perform operations on yaml files. + +## Subtools +- `fre yamltools combine-yamls [options]` - Purpose: + - Creates a combined yaml file in which the [model].yaml, compile.yaml, and platforms.yaml are merged if `--use compile` is specified + - Creates a combined yaml file in which the [model].yaml, [experiment].yaml, and [analysis].yaml are merged if `--use pp` is specified + - Options: + - `-y, --yamlfile [model yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-e, --experiment [experiment name]` + - `--use [compile|pp] (required)`
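+
+Example invocations (illustrative; these reuse the AM5 sample yamls and the platform/target names found in this repository's tests):
+```bash
+# Combine the model, compile, and platform yamls for compilation
+fre yamltools combine-yamls -y am5.yaml -p ncrc5.intel23 -t prod --use compile
+
+# Combine the model, experiment, and analysis yamls for post-processing
+fre yamltools combine-yamls -y am5.yaml -e c96L65_am5f7b12r1_amip -p gfdl.ncrc5-intel22-classic -t prod --use pp
+```
+Note that for `--use pp`, `-e` must name an experiment listed in the model yaml.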
### **Tests** diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 3d0fdc01..7d9ff585 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -8,14 +8,12 @@ ## TO-DO: # - figure out way to safe_load (yaml_loader=yaml.SafeLoader?) # - condition where there are multiple pp and analysis yamls -# - fix schema for validation import os import json import shutil from pathlib import Path import click -from jsonschema import validate import yaml def join_constructor(loader, node): @@ -35,32 +33,43 @@ def yaml_load(yamlfile): return y -def combine_model(modelyaml,combined,experiment,platform,target): +def get_compile_paths(mainyaml_dir,comb): """ - Create the combined.yaml and merge it with the model yaml - Arguments: - modelyaml : model yaml file - combined : final combined file name - experiment : experiment name - platform : platform used - target : targets used + Extract compile and platform paths from model yaml """ - # copy model yaml info into combined yaml - with open(combined,'w+',encoding='UTF-8') as f1: - f1.write(f'name: &name "{experiment}"\n') - f1.write(f'platform: &platform "{platform}"\n') - f1.write(f'target: &target "{target}"\n\n') - with open(modelyaml,'r',encoding='UTF-8') as f2: - f1.write("### MODEL YAML SETTINGS ###\n") - shutil.copyfileobj(f2,f1) - print(f" model yaml: {modelyaml}") + comb_model=yaml_load(comb) + + # set platform yaml filepath + if comb_model["build"]["platformYaml"] is not None: + if Path(comb_model["build"]["platformYaml"]).exists(): + py=comb_model["build"]["platformYaml"] + py_path=Path(os.path.join(mainyaml_dir,py)) + else: + raise ValueError("Incorrect platform yaml path given; does not exist.") + else: + raise ValueError("No platform yaml path given!") + + # set compile yaml filepath + if comb_model["build"]["compileYaml"] is not None: + if Path(comb_model["build"]["compileYaml"]).exists(): + cy=comb_model["build"]["compileYaml"] + cy_path=Path(os.path.join(mainyaml_dir,cy)) + else: + raise ValueError("Incorrect compile yaml path given; does not exist.") + else: + raise ValueError("No compile yaml path given!") + + return (py_path,cy_path) + def experiment_check(mainyaml_dir,comb,experiment): """ Check that the experiment given is an experiment listed in the model yaml. Extract experiment specific information and file paths. Arguments: - mainyaml_dir : model yaml file + mainyaml_dir : model yaml file comb : combined yaml file name experiment : experiment name """ @@ -74,193 +83,298 @@ def experiment_check(mainyaml_dir,comb,experiment): if experiment not in exp_list: raise NameError(f"{experiment} is not in the list of experiments") - # set platform yaml filepath - if comb_model["shared"]["compile"]["platformYaml"] is not None: - py=comb_model["shared"]["compile"]["platformYaml"] - py_path=Path(os.path.join(mainyaml_dir,py)) - else: - py_path=None - # Extract compile yaml path for exp.
provided # if experiment matches name in list of experiments in yaml, extract file path for i in comb_model.get("experiments"): if experiment == i.get("name"): - compileyaml=i.get("compile") expyaml=i.get("pp") analysisyaml=i.get("analysis") - - if compileyaml is not None: - cy_path=Path(os.path.join(mainyaml_dir,compileyaml)) - else: - cy_path=None - + if expyaml is not None: ey_path=[] for e in expyaml: - ey=Path(os.path.join(mainyaml_dir,e)) - ey_path.append(ey) + if Path(e).exists(): + ey=Path(os.path.join(mainyaml_dir,e)) + ey_path.append(ey) + else: + raise ValueError("Incorrect experiment yaml path given; does not exist.") else: - ey_path=None + raise ValueError("No experiment yaml path given!") if analysisyaml is not None: ay_path=[] for a in analysisyaml: - ay=Path(os.path.join(mainyaml_dir,a)) - ay_path.append(ay) + if Path(a).exists(): + ay=Path(os.path.join(mainyaml_dir,a)) + ay_path.append(ay) + else: + raise ValueError("Incorrect analysis yaml path given; does not exist.") else: ay_path=None - return (py_path,cy_path,ey_path,ay_path) + return (ey_path,ay_path) -def combine_compile(comb_m,compileyaml): +## COMPILE CLASS ## +class init_compile_yaml(): + def __init__(self,yamlfile,platform,target): + """ + Process to combine yamls applicable to compilation + """ + self.yml = yamlfile + self.name = yamlfile.split(".")[0] + self.platform = platform + self.target = target + + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + self.combined=f"combined-{self.name}.yaml" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + + print(f" model yaml: {self.yml}") + + def combine_compile(self): """ Combine compile yaml with the defined combined.yaml - Arguments: - comb_m : combined model yaml file - compileyaml : compile yaml file """ - combined = comb_m + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) # copy compile yaml info into combined yaml - if compileyaml is not None: - with open(combined,'a',encoding='UTF-8') as f1: - with open(compileyaml,'r',encoding='UTF-8') as f2: + if cy_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(cy_path,'r',encoding='UTF-8') as f2: f1.write("\n### COMPILE INFO ###\n") shutil.copyfileobj(f2,f1) - print(f" compile yaml: {compileyaml}") -def combine_platforms(comb_mc,platformsyaml): + print(f" compile yaml: {cy_path}") + + def combine_platforms(self): """ Combine platforms yaml with the defined combined.yaml - Arguments: - comb_mc : combined model and compile yaml file - platformsyaml : platforms yaml file """ - combined = comb_mc + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) + # combine platform yaml - if platformsyaml is not None: - with open(combined,'a',encoding='UTF-8') as f1: - with open(platformsyaml,'r',encoding='UTF-8') as f2: + if py_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with
open(py_path,'r',encoding='UTF-8') as f2: f1.write("\n### PLATFORM INFO ###\n") shutil.copyfileobj(f2,f1) - print(f" platforms yaml: {platformsyaml}") + print(f" platforms yaml: {py_path}") + + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. + """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) + + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] + + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) + + print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") + return self.combined + +## PP CLASS ## +class init_pp_yaml(): + def __init__(self,yamlfile,experiment,platform,target): + """ + Process to combine the applicable yamls for post-processing + """ + self.yml = yamlfile + self.name = experiment + self.platform = platform + self.target = target -def combine_experiments(comb_mcp,expyaml): + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + self.combined=f"combined-{self.name}.yaml" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + + print(f" model yaml: {self.yml}") + + def combine_experiment(self): """ Combine experiment yamls with the defined combined.yaml - Arguments: - comb_mcp : combined model, compile, and platforms yaml file - expyaml : experiment yaml files """ - combined = comb_mcp + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + ## COMBINE EXPERIMENT YAML INFO - if expyaml is not None: - for i in expyaml: + if ey_path is not None: + for i in ey_path: #expyaml_path = os.path.join(mainyaml_dir, i) - with open(combined,'a',encoding='UTF-8') as f1: + with open(self.combined,'a',encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: - #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) print(f" experiment yaml: {i}") -def combine_analysis(comb_mcpe,analysisyaml): + def combine_analysis(self): """ Combine analysis yamls with the defined combined.yaml - Arguments: - comb_mcpe : combined model, compile, platforms, and experiment yaml file - analysisyaml : analysis yaml file """ - combined = comb_mcpe + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) ## COMBINE EXPERIMENT YAML INFO - if analysisyaml is not None: - for i in analysisyaml: + if ay_path is not None: + for i in ay_path: #analysisyaml_path = os.path.join(mainyaml_dir, i) - with open(combined,'a',encoding='UTF-8') as f1: + with open(self.combined,'a',encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) print(f" analysis yaml: {i}") -###### VALIDATE ##### FIX
VALIDATION ##### -package_dir = os.path.dirname(os.path.abspath(__file__)) -schema_path = os.path.join(package_dir, 'schema.json') -def validate_yaml(file): + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. + """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) + + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] + + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) + + print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}") + return self.combined + +## Functions to combine the yaml files ## +def get_combined_compileyaml(comb): """ - Using the schema.json file, the yaml format is validated. + Combine the model, compile, and platform yamls Arguments: - file : combined yaml file + - comb : combined yaml object """ - # Load the json schema: .load() (vs .loads()) reads and parses the json in one - with open(schema_path) as s: - schema = json.load(s) + # Merge model into combined file + comb_model = comb.combine_model() + # Merge compile.yaml into combined file + comb_compile = comb.combine_compile() + # Merge platforms.yaml into combined file + full_combined = comb.combine_platforms() + # Clean the yaml + full_combined = comb.clean_yaml() - # Validate yaml - # If the yaml is not valid, the schema validation will raise errors and exit - if validate(instance=file,schema=schema) is None: - print("YAML VALID") + return full_combined -###### MAIN ##### -def _consolidate_yamls(yamlfile,experiment, platform,target): +def combined_compile_existcheck(combined,yml,platform,target): """ - Process to combine and validate the yamls + Checks whether the combined compile yaml already exists. + If not, combine model, compile, and platform yamls.
""" - # Regsiter tag handler - yaml.add_constructor('!join', join_constructor) - - # Path to the main model yaml - mainyaml_dir = os.path.dirname(yamlfile) - - # Name of the combined yaml - combined=f"combined-{experiment}.yaml" + cd = Path.cwd() + combined_path=os.path.join(cd,combined) + + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): + full_combined = combined_path + print("\nNOTE: Yamls previously merged.") + else: + comb = init_compile_yaml(yml,platform,target) + full_combined = get_combined_compileyaml(comb) - print("Combining yaml files: ") + return full_combined +def get_combined_ppyaml(comb): + """ + Combine the model, experiment, and analysis yamls + Arguments: + - comb : combined yaml object + """ # Merge model into combined file - combine_model(yamlfile,combined,experiment,platform,target) - - # Experiment check - (py_path,cy_path,ey_path,ay_path) = experiment_check(mainyaml_dir,combined,experiment) - - # Merge compile.yaml into combined file - combine_compile(combined,cy_path) - - # Merge platforms.yaml into combined file - combine_platforms(combined,py_path) - + comb_model = comb.combine_model() # Merge pp experiment yamls into combined file - combine_experiments(combined,ey_path) - + comb_exp = comb.combine_experiment() # Merge pp analysis yamls, if defined, into combined file - combine_analysis(combined,ay_path) + comb_analysis = comb.combine_analysis() + # Clean the yaml + full_combined = comb.clean_yaml() - # Load the fully combined yaml - full_yaml = yaml_load(combined) + return full_combined - # Clean the yaml - # If keys exists, delete: - keys_clean=["fre_properties", "shared", "experiments"] - for kc in keys_clean: - if kc in full_yaml.keys(): - del full_yaml[kc] +########################################################################################### +def _consolidate_yamls(yamlfile,experiment,platform,target,use): + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) - with open(combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(full_yaml,f,sort_keys=False) + # Path to the main model yaml + mainyaml_dir = os.path.dirname(yamlfile) - print(f"Combined yaml located here: {os.path.dirname(combined)}/{combined}") -## TO-DO: fix schema for validation -# # validate yaml -# validate_yaml(full.yaml) + if use == "compile": + combined = init_compile_yaml(yamlfile, platform, target) + # Create combined compile yaml + get_combined_compileyaml(combined) + elif use =="pp": + combined = init_pp_yaml(yamlfile,experiment,platform,target) + # Create combined pp yaml + get_combined_ppyaml(combined) + else: + raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") @click.command() -def consolidate_yamls(yamlfile,experiment, platform,target): +def consolidate_yamls(yamlfile,experiment,platform,target,use): ''' Wrapper script for calling yaml_combine - allows the decorated version of the function to be separate from the undecorated version ''' - return _consolidate_yamls(yamlfile,experiment, platform,target) + return _consolidate_yamls(yamlfile,experiment,platform,target,use) # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py index d190b243..fc144a5e 100644 --- a/fre/yamltools/freyamltools.py +++ b/fre/yamltools/freyamltools.py @@ -25,8 +25,7 @@ def function(context, uppercase): @click.option("-e", "--experiment", type=str, - 
help="Experiment name", - required=True) + help="Experiment name") @click.option("-p", "--platform", type=str, @@ -37,8 +36,12 @@ def function(context, uppercase): type=str, help="Target name", required=True) +@click.option("--use", + type=click.Choice(['compile','pp']), + help="Process user is combining yamls for. Can pass 'compile' or 'pp'", + required=True) @click.pass_context -def combine_yamls(context,yamlfile,experiment,platform,target): +def combine_yamls(context,yamlfile,experiment,platform,target,use): """ - Combine the model yaml with the compile, platform, experiment, and analysis yamls diff --git a/fre/yamltools/tests/AM5_example/am5.yaml b/fre/yamltools/tests/AM5_example/am5.yaml index 05391f3b..c57c997c 100644 --- a/fre/yamltools/tests/AM5_example/am5.yaml +++ b/fre/yamltools/tests/AM5_example/am5.yaml @@ -38,12 +38,12 @@ fre_properties: - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -shared: +build: # compile information - compile: - compileYaml: &compile_yaml "compile.yaml" - platformYaml: "yaml_include/platforms.yaml" - + compileYaml: "compile_yamls/compile.yaml" + platformYaml: "compile_yamls/platforms.yaml" + +shared: # directories shared across tools directories: &shared_directories history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] @@ -69,47 +69,4 @@ shared: experiments: - name: "c96L65_am5f7b12r1_amip" pp: - - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml - - name: "c96L65_am5f7b12r1_pdclim1850F" - pp: - - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - - name: "c96L65_am5f7b12r1_pdclim2010F" - pp: - - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - - name: "c96L65_am5f7b12r1_pdclim2010AERF" - pp: - - "yaml_include/pp.c96_clim.yaml" - compile: *compile_yaml - - name: "c384L65_am5f7b12r1_amip" - pp: - - "yaml_include/pp.c384_amip.yaml" - compile: *compile_yaml - - name: "c384L65_am5f7b12r1_pdclim2010F" - pp: - - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - - name: "c384L65_am5f7b12r1_pdclim1850F" - pp: - - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - - name: "c384L65_am5f7b12r1_pdclim2010AERF" - pp: - - "yaml_include/pp.c384_clim.yaml" - compile: *compile_yaml - - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" - pp: - - "yaml_include/pp.c384_amip.yaml" - - "yaml_include/pp.om4.yaml" - compile: *compile_yaml - - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg" - pp: - - "yaml_include/pp.c96_amip.yaml" - - "yaml_include/pp.om4.yaml" - compile: *compile_yaml - - name: "c96L65_am5f7b12r1_amip_cosp" - pp: - - "yaml_include/pp.c96_amip.yaml" - compile: *compile_yaml + - "pp_yamls/pp.c96_amip.yaml" diff --git a/fre/yamltools/tests/AM5_example/compile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile.yaml similarity index 100% rename from fre/yamltools/tests/AM5_example/compile.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile.yaml diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml new file mode 100644 index 00000000..86bbb57a --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_compilefile.yaml @@ -0,0 +1,72 @@ +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + 
- &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "P1Y" + - &PP_AMIP_CHUNK384 "P1Y" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - &PP_CPLD_CHUNK_A "P5Y" + - &PP_CPLD_CHUNK_B "P20Y" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "compiile.yaml" + platformYaml: "wrong_platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml new file mode 100644 index 00000000..e65f3bd2 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/am5-wrong_datatype.yaml @@ -0,0 +1,72 @@ +# reusable variables +fre_properties: + - &AM5_VERSION "am5f7b12r1" + - &FRE_STEM !join [am5/, *AM5_VERSION] + + # amip + - &EXP_AMIP_START "1979" + - &EXP_AMIP_END "2020" + - &ANA_AMIP_START "1980" + - &ANA_AMIP_END "2020" + + - &PP_AMIP_CHUNK96 "P1Y" + - &PP_AMIP_CHUNK384 "P1Y" + - &PP_XYINTERP96 "180,288" + - &PP_XYINTERP384 "720,1152" + + # climo + - &EXP_CLIMO_START96 "0001" + - &EXP_CLIMO_END96 "0011" + - &ANA_CLIMO_START96 "0002" + - &ANA_CLIMO_END96 "0011" + + - &EXP_CLIMO_START384 "0001" + - &EXP_CLIMO_END384 "0006" + - &ANA_CLIMO_START384 "0002" + - &ANA_CLIMO_END384 "0006" + + # coupled + - &PP_CPLD_CHUNK_A "P5Y" + - &PP_CPLD_CHUNK_B "P20Y" + + # grids + - &GRID_SPEC96 "/archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar" + + # compile information + - &release "f1a1r1" + - &INTEL "intel-classic" + - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" + - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" + +build: + # compile information + compileYaml: "wrong_compile.yaml" + platformYaml: "wrong_platforms.yaml" + +shared: + # directories shared across tools + directories: &shared_directories + history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] + pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] + analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] + ptmp_dir: "/xtmp/$USER/ptmp" + 
fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" + + # shared pp settings + postprocess: + settings: &shared_settings + history_segment: "P1Y" + site: "ppan" + switches: &shared_switches + do_statics: True + do_timeavgs: True + clean_work: True + do_refinediag: False + do_atmos_plevel_masking: True + do_preanalysis: False + do_analysis: True + +experiments: + - name: "c96L65_am5f7b12r1_amip" + pp: + - diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml new file mode 100644 index 00000000..c122764a --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_compile.yaml @@ -0,0 +1,11 @@ +compile: + experiment: "am5" + container_addlibs: + baremetal_linkerflags: + src: + - component: + repo: "https://github.com/NOAA-GFDL/FMS.git" + cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" + branch: 2022.01 + cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" + otherFlags: *FMSincludes diff --git a/fre/make/tests/AM5_example/platforms.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_platforms.yaml similarity index 82% rename from fre/make/tests/AM5_example/platforms.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_platforms.yaml index 02b7d222..7e1b9f49 100644 --- a/fre/make/tests/AM5_example/platforms.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/compile_fail/wrong_platforms.yaml @@ -2,7 +2,7 @@ platforms: - name: ncrc5.intel compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" @@ -10,7 +10,7 @@ platforms: - name: ncrc5.intel23 compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] fc: ftn cc: cc mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" diff --git a/fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml b/fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml similarity index 89% rename from fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml rename to fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml index af3a702c..7e1b9f49 100644 --- a/fre/yamltools/tests/AM5_example/yaml_include/platforms.yaml +++ b/fre/yamltools/tests/AM5_example/compile_yamls/platforms.yaml @@ -14,7 +14,7 @@ platforms: fc: ftn cc: cc mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: "/ncrc/home1/Dana.Singh/fre/fre-cli/fre/make/tests/AM5_example/combine_yamls/test" #${HOME}/fremake_canopy/test + modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] diff --git a/fre/yamltools/tests/AM5_example/compile_yamls/schema.json b/fre/yamltools/tests/AM5_example/compile_yamls/schema.json new file mode 120000 index 00000000..7f618a23 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/compile_yamls/schema.json @@ -0,0 +1 @@ +../../../../make/gfdlfremake/schema.json \ No newline at end of file diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/pp.c96_amip.yaml b/fre/yamltools/tests/AM5_example/pp_yamls/pp.c96_amip.yaml new file mode 100644 index 00000000..117c66c6 --- /dev/null +++ b/fre/yamltools/tests/AM5_example/pp_yamls/pp.c96_amip.yaml @@ -0,0 +1,88 @@ +# local reusable variable overrides +fre_properties: + - &custom_interp "180,360" + +# directory overrides +#c96_amip_directories: +directories: + <<: *shared_directories + ptmp_dir: "/ptmp/$USER" + pp_grid_spec: *GRID_SPEC96 + +#c96_amip_postprocess: +postprocess: + # pp setting overrides + settings: + <<: *shared_settings + pp_start: *ANA_AMIP_START + pp_stop: *ANA_AMIP_END + pp_chunk_a: *PP_AMIP_CHUNK96 + pp_components: "atmos atmos_scalar" + switches: + <<: *shared_switches + do_statics: False + + # main pp instructions + components: + - type: "atmos_cmip" + sources: "atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip" + sourceGrid: "cubedsphere" + xyInterp: *custom_interp + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos" + sources: "atmos_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_level_cmip" + sources: "atmos_level_cmip" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_level" + sources: "atmos_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_month_aer" + sources: "atmos_month_aer" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'atmos' + - type: "atmos_diurnal" + sources: "atmos_diurnal" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order2" + inputRealm: 'atmos' + - type: "atmos_scalar" + sources: "atmos_scalar" + - type: "aerosol_cmip" + xyInterp: *PP_XYINTERP96 + sources: "aerosol_month_cmip" + sourceGrid: "cubedsphere" + interpMethod: "conserve_order1" + inputRealm: 'atmos' + - type: "land" + sources: "land_month" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'land' + - type: "land_cmip" + sources: "land_month_cmip" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'land' + - type: "tracer_level" + sources: "atmos_tracer" + sourceGrid: "cubedsphere" + xyInterp: *PP_XYINTERP96 + interpMethod: "conserve_order1" + inputRealm: 'atmos' diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/schema.json b/fre/yamltools/tests/AM5_example/pp_yamls/schema.json new file mode 120000 index 00000000..a76dc8fc --- /dev/null +++ b/fre/yamltools/tests/AM5_example/pp_yamls/schema.json @@ -0,0 +1 @@ +../../../../pp/schema.json \ No newline at end of file diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py index 79d58a67..25b8a2a5 100644 --- a/fre/yamltools/tests/test_combine_yamls.py +++ b/fre/yamltools/tests/test_combine_yamls.py @@ -2,7 +2,12 @@ from pathlib 
import Path import pytest import shutil +import json +import yaml +from jsonschema import validate, ValidationError, SchemaError from fre.yamltools import combine_yamls as cy + ## SET-UP # Set example yaml paths, input directory, output directory @@ -10,20 +15,28 @@ test_dir = Path("fre/yamltools/tests") in_dir = Path(f"{CWD}/{test_dir}/AM5_example") -# Create output directory -out_dir = Path(f"{CWD}/{test_dir}/combine_yamls_out") +# Create output directories +comp_out_dir = Path(f"{CWD}/{test_dir}/combine_yamls_out/compile") +pp_out_dir = Path(f"{CWD}/{test_dir}/combine_yamls_out/pp") # If output directory exists, remove and create again -if out_dir.exists(): - shutil.rmtree(out_dir) - Path(out_dir).mkdir(parents=True,exist_ok=True) -else: - Path(out_dir).mkdir(parents=True,exist_ok=True) +for out in [comp_out_dir, pp_out_dir]: + if out.exists(): + shutil.rmtree(out) + Path(out).mkdir(parents=True,exist_ok=True) + else: + Path(out).mkdir(parents=True,exist_ok=True) + +## Set what would be click options +# Compile +COMP_EXPERIMENT = "am5" +COMP_PLATFORM = "ncrc5.intel23" +COMP_TARGET = "prod" -# Set what would be click options -EXPERIMENT = "c96L65_am5f7b12r1_amip" -PLATFORM = "gfdl.ncrc5-intel22-classic" -TARGET = "prod" +# Post-processing +PP_EXPERIMENT = "c96L65_am5f7b12r1_amip" +PP_PLATFORM = "gfdl.ncrc5-intel22-classic" +PP_TARGET = "prod" def test_modelyaml_exists(): """ @@ -35,44 +48,176 @@ def test_compileyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/compile.yaml").exists() + assert Path(f"{in_dir}/compile_yamls/compile.yaml").exists() def test_platformyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/yaml_include/platforms.yaml").exists() + assert Path(f"{in_dir}/compile_yamls/platforms.yaml").exists() + +def test_merged_compile_yamls(): + """ + Check for the creation of the combined-[experiment] yaml + Check that the model yaml was merged into the combined yaml + """ + # Go into the input directory + os.chdir(in_dir) + + # Model yaml path + modelyaml = "am5.yaml" + USE = "compile" + + # Merge the yamls + cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + + # Move combined yaml to output location + shutil.move(f"combined-am5.yaml", comp_out_dir) + + # Check that the combined yaml exists + assert Path(f"{comp_out_dir}/combined-{COMP_EXPERIMENT}.yaml").exists() + + # Go back to original directory + os.chdir(CWD) + +def test_combined_compileyaml_validation(): + """ + Validate the combined compile yaml + """ + combined_yamlfile =f"{comp_out_dir}/combined-{COMP_EXPERIMENT}.yaml" + schema_file = os.path.join(f"{in_dir}","compile_yamls","schema.json") + + with open(combined_yamlfile,'r') as cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + # If the yaml is valid, no issues + # If the yaml is not valid, error + try: + validate(instance=yml,schema=schema) + except: + assert False + +def test_combined_compileyaml_combinefail(): + """ + Check that if the compile yaml is incorrect/does not exist, + the combine fails.
(compile yaml path misspelled) + """ + # Go into the input directory + os.chdir(f"{in_dir}/compile_yamls/compile_fail") + # Model yaml path + modelyaml = f"am5-wrong_compilefile.yaml" + USE = "compile" + + # Merge the yamls - should fail since the compile yaml path specified in the model yaml is misspelled + try: + consolidate = cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_compilefile.yaml", comp_out_dir) + except: + print("EXPECTED FAILURE") + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_compilefile.yaml", comp_out_dir) + assert True + + # Go back to original directory + os.chdir(CWD) + +def test_combined_compileyaml_validatefail(): + """ + Check if the schema is validating correctly + Branch should be string + """ + # Go into the input directory + os.chdir(f"{in_dir}/compile_yamls/compile_fail") + + # Model yaml path + modelyaml = "am5-wrong_datatype.yaml" + USE = "compile" + + # Merge the yamls + cy._consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, USE) + + # Move combined yaml to output location + shutil.move(f"combined-am5-wrong_datatype.yaml", comp_out_dir) + + # Validate against schema; should fail + wrong_combined = Path(f"{comp_out_dir}/combined-am5-wrong_datatype.yaml") + schema_file = os.path.join(f"{in_dir}","compile_yamls","schema.json") + + # Open/load combined yaml file + with open(wrong_combined,'r') as cf: + yml = yaml.safe_load(cf) + + # Open/load schema.json + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + # Validation should fail + try: + validate(instance=yml,schema=schema) + except: + assert True + + # Go back to original directory + os.chdir(CWD) + +############ PP ############ def test_expyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/yaml_include/pp.c96_amip.yaml").exists() + assert Path(f"{in_dir}/pp_yamls/pp.c96_amip.yaml").exists() -@pytest.mark.skip(reason='analysis scripts might not be defined') +@pytest.mark.skip(reason='analysis scripts might not be defined yet') def test_analysisyaml_exists(): """ Make sure experiment yaml file exists """ - assert Path(f"{in_dir}/yaml_include/analysis.yaml").exists() + assert Path(f"{in_dir}/pp_yamls/analysis.yaml").exists() -def test_merged_yamls(): +def test_merged_pp_yamls(): """ - Check for the creation of the combined-[experiment] yaml + Check for the creation of the combined-[experiment] yaml Check that the model yaml was merged into the combined yaml """ + # Go into the input directory + os.chdir(in_dir) + # Model yaml path modelyaml = Path(f"{in_dir}/am5.yaml") + USE = "pp" # Merge the yamls - cy._consolidate_yamls(modelyaml,EXPERIMENT, PLATFORM, TARGET) + cy._consolidate_yamls(modelyaml, PP_EXPERIMENT, PP_PLATFORM, PP_TARGET, USE) # Move combined yaml to output location - shutil.move(f"combined-{EXPERIMENT}.yaml", out_dir) + shutil.move(f"combined-{PP_EXPERIMENT}.yaml", pp_out_dir) # Check that the combined yaml exists - assert Path(f"{out_dir}/combined-{EXPERIMENT}.yaml").exists() + assert Path(f"{pp_out_dir}/combined-{PP_EXPERIMENT}.yaml").exists() + + # Go back to original directory + os.chdir(CWD) + +def test_combined_ppyaml_validation(): + """ + Validate the combined pp yaml + """ + combined_yamlfile =f"{pp_out_dir}/combined-{PP_EXPERIMENT}.yaml" + schema_dir = Path(f"{in_dir}/pp_yamls") + schema_file = os.path.join(schema_dir, 'schema.json') + + with open(combined_yamlfile,'r') as
cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) -##TO-DO: -# - check for correct yaml merging -# - validation + validate(instance=yml,schema=schema) diff --git a/setup.py b/setup.py index d9b97785..751efcc1 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name='fre-cli', - version='0.1.6', + version='2024.01', description='Command Line Interface for FRE commands', author='MSD Workflow Team, Bennett Chang, Dana Singh, Chris Blanton', author_email='oar.gfdl.workflow@noaa.gov',
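For completeness, the pp-side counterpart of the compile flow exercised in the tests above looks like this; a minimal sketch (the experiment, platform, and target values are taken from the test constants and are illustrative; run it from the directory holding `am5.yaml`, as `test_merged_pp_yamls` does via `os.chdir`):

```python
# Sketch: combine model + experiment (+ analysis) yamls for post-processing
from fre.yamltools import combine_yamls as cy

comb = cy.init_pp_yaml("am5.yaml", "c96L65_am5f7b12r1_amip",
                       "gfdl.ncrc5-intel22-classic", "prod")

# Writes and returns combined-c96L65_am5f7b12r1_amip.yaml
full_combined = cy.get_combined_ppyaml(comb)
```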