diff --git a/compass/ocean/tests/global_ocean/__init__.py b/compass/ocean/tests/global_ocean/__init__.py index a394b3bf64..f7042e8fb6 100644 --- a/compass/ocean/tests/global_ocean/__init__.py +++ b/compass/ocean/tests/global_ocean/__init__.py @@ -1,28 +1,31 @@ -from compass.testgroup import TestGroup - -from compass.ocean.tests.global_ocean.mesh import Mesh -from compass.ocean.tests.global_ocean.mesh.qu240.dynamic_adjustment import \ - QU240DynamicAdjustment -from compass.ocean.tests.global_ocean.mesh.ec30to60.dynamic_adjustment import \ - EC30to60DynamicAdjustment -from compass.ocean.tests.global_ocean.mesh.arrm10to60.dynamic_adjustment \ - import ARRM10to60DynamicAdjustment -from compass.ocean.tests.global_ocean.mesh.so12to60.dynamic_adjustment import \ - SO12to60DynamicAdjustment -from compass.ocean.tests.global_ocean.mesh.wc14.dynamic_adjustment import \ - WC14DynamicAdjustment +from compass.ocean.tests.global_ocean.analysis_test import AnalysisTest +from compass.ocean.tests.global_ocean.daily_output_test import DailyOutputTest +from compass.ocean.tests.global_ocean.decomp_test import DecompTest +from compass.ocean.tests.global_ocean.files_for_e3sm import FilesForE3SM from compass.ocean.tests.global_ocean.init import Init +from compass.ocean.tests.global_ocean.mesh import Mesh +from compass.ocean.tests.global_ocean.mesh.arrm10to60.dynamic_adjustment import ( # noqa: E501 + ARRM10to60DynamicAdjustment, +) +from compass.ocean.tests.global_ocean.mesh.ec30to60.dynamic_adjustment import ( + EC30to60DynamicAdjustment, +) +from compass.ocean.tests.global_ocean.mesh.qu240.dynamic_adjustment import ( + QU240DynamicAdjustment, +) +from compass.ocean.tests.global_ocean.mesh.so12to60.dynamic_adjustment import ( + SO12to60DynamicAdjustment, +) +from compass.ocean.tests.global_ocean.mesh.wc14.dynamic_adjustment import ( + WC14DynamicAdjustment, +) +from compass.ocean.tests.global_ocean.monthly_output_test import ( + MonthlyOutputTest, +) from compass.ocean.tests.global_ocean.performance_test import PerformanceTest from compass.ocean.tests.global_ocean.restart_test import RestartTest -from compass.ocean.tests.global_ocean.decomp_test import DecompTest from compass.ocean.tests.global_ocean.threads_test import ThreadsTest -from compass.ocean.tests.global_ocean.analysis_test import AnalysisTest -from compass.ocean.tests.global_ocean.daily_output_test import DailyOutputTest -from compass.ocean.tests.global_ocean.monthly_output_test import \ - MonthlyOutputTest -from compass.ocean.tests.global_ocean.files_for_e3sm import FilesForE3SM -from compass.ocean.tests.global_ocean.make_diagnostics_files import \ - MakeDiagnosticsFiles +from compass.testgroup import TestGroup class GlobalOcean(TestGroup): @@ -226,5 +229,5 @@ def __init__(self, mpas_core): test_group=self, mesh=mesh, init=init, dynamic_adjustment=dynamic_adjustment)) - # A test case for making diagnostics files from an existing mesh - self.add_test_case(MakeDiagnosticsFiles(test_group=self)) + # A test case for making E3SM support files from an existing mesh + self.add_test_case(FilesForE3SM(test_group=self)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/README b/compass/ocean/tests/global_ocean/files_for_e3sm/README index e5b9d79b20..71be5abef4 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/README +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/README @@ -4,13 +4,13 @@ only a subset of the files needed for E3SM support of a new ocean and sea-ice mesh. 
*********************************************************** -After running run.py, the directory assembled_files is populated with links. -The directory structure is identical to the E3SM inputdata and diagnostics -directories found here: +After running "compass run", the directory assembled_files is populated with +links. The directory structure is identical to the E3SM inputdata and +diagnostics directories found here: https://web.lcrc.anl.gov/public/e3sm/ E3SM members should NOT attempt to upload these files. The files produced here are only a subset of those needed to support a new mesh in E3SM and should not be uploaded on their own. E3SM experts who know how to produce the other coupling files may upload these files along with other required datasets to the -LCRC server. \ No newline at end of file +LCRC server. diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py index cd24e35d81..273f5ff94f 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py @@ -1,20 +1,31 @@ import os -from compass.io import symlink, package_path -from compass.testcase import TestCase -from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_initial_condition \ - import OceanInitialCondition -from compass.ocean.tests.global_ocean.files_for_e3sm.seaice_initial_condition \ - import SeaiceInitialCondition -from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_graph_partition \ - import OceanGraphPartition +from compass.io import package_path, symlink +from compass.ocean.tests.global_ocean.configure import configure_global_ocean +from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostic_maps import ( + DiagnosticMaps, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostic_masks import ( + DiagnosticMasks, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.e3sm_to_cmip_maps import ( + E3smToCmipMaps, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_graph_partition import ( # noqa: E501 + OceanGraphPartition, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_initial_condition import ( # noqa: E501 + OceanInitialCondition, +) from compass.ocean.tests.global_ocean.files_for_e3sm.scrip import Scrip -from compass.ocean.tests.global_ocean.files_for_e3sm.e3sm_to_cmip_maps import \ - E3smToCmipMaps -from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostics_files \ - import DiagnosticsFiles +from compass.ocean.tests.global_ocean.files_for_e3sm.seaice_graph_partition import ( # noqa: E501 + SeaiceGraphPartition, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.seaice_initial_condition import ( # noqa: E501 + SeaiceInitialCondition, +) from compass.ocean.tests.global_ocean.forward import get_forward_subdir -from compass.ocean.tests.global_ocean.configure import configure_global_ocean +from compass.testcase import TestCase class FilesForE3SM(TestCase): @@ -34,12 +45,9 @@ class FilesForE3SM(TestCase): dynamic_adjustment : compass.ocean.tests.global_ocean.dynamic_adjustment.DynamicAdjustment The test case that performs dynamic adjustment to dissipate fast-moving waves from the initial condition - - restart_filename : str - A restart file from the end of the dynamic adjustment test case to use - as the basis for an E3SM initial condition - """ - def __init__(self, test_group, mesh, init, dynamic_adjustment): + """ # noqa: E501 + def __init__(self, test_group, mesh=None, init=None, + 
dynamic_adjustment=None): """ Create test case for creating a global MPAS-Ocean mesh @@ -48,62 +56,69 @@ def __init__(self, test_group, mesh, init, dynamic_adjustment): test_group : compass.ocean.tests.global_ocean.GlobalOcean The global ocean test group that this test case belongs to - mesh : compass.ocean.tests.global_ocean.mesh.Mesh + mesh : compass.ocean.tests.global_ocean.mesh.Mesh, optional The test case that produces the mesh for this run - init : compass.ocean.tests.global_ocean.init.Init + init : compass.ocean.tests.global_ocean.init.Init, optional The test case that produces the initial condition for this run - dynamic_adjustment : compass.ocean.tests.global_ocean.dynamic_adjustment.DynamicAdjustment + dynamic_adjustment : compass.ocean.tests.global_ocean.dynamic_adjustment.DynamicAdjustment, optional The test case that performs dynamic adjustment to dissipate fast-moving waves from the initial condition - """ + """ # noqa: E501 name = 'files_for_e3sm' - time_integrator = dynamic_adjustment.time_integrator - subdir = get_forward_subdir(init.init_subdir, time_integrator, name) + if dynamic_adjustment is not None: + time_integrator = dynamic_adjustment.time_integrator + subdir = get_forward_subdir( + init.init_subdir, time_integrator, name) + else: + subdir = name super().__init__(test_group=test_group, name=name, subdir=subdir) self.mesh = mesh self.init = init self.dynamic_adjustment = dynamic_adjustment - restart_filename = os.path.join( - '..', 'dynamic_adjustment', - dynamic_adjustment.restart_filenames[-1]) - self.restart_filename = restart_filename - - self.add_step( - OceanInitialCondition(test_case=self, - restart_filename=restart_filename)) - - self.add_step( - OceanGraphPartition(test_case=self, mesh=mesh, - restart_filename=restart_filename)) - - self.add_step( - SeaiceInitialCondition( - test_case=self, restart_filename=restart_filename, - with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) - - self.add_step( - Scrip( - test_case=self, restart_filename=restart_filename, - with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) - - self.add_step( - E3smToCmipMaps( - test_case=self, restart_filename=restart_filename)) - - self.add_step( - DiagnosticsFiles( - test_case=self, restart_filename=restart_filename, - with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) + self.add_step(OceanInitialCondition(test_case=self)) + self.add_step(OceanGraphPartition(test_case=self)) + self.add_step(SeaiceInitialCondition(test_case=self)) + self.add_step(SeaiceGraphPartition(test_case=self)) + self.add_step(Scrip(test_case=self)) + self.add_step(E3smToCmipMaps(test_case=self)) + self.add_step(DiagnosticMaps(test_case=self)) + self.add_step(DiagnosticMasks(test_case=self)) def configure(self): """ Modify the configuration options for this test case """ - configure_global_ocean(test_case=self, mesh=self.mesh, init=self.init) + mesh = self.mesh + init = self.init + dynamic_adjustment = self.dynamic_adjustment + config = self.config + work_dir = self.work_dir + + if mesh is not None: + configure_global_ocean(test_case=self, mesh=mesh, + init=init) package = 'compass.ocean.tests.global_ocean.files_for_e3sm' with package_path(package, 'README') as target: - symlink(str(target), '{}/README'.format(self.work_dir)) + symlink(str(target), f'{work_dir}/README') + + if mesh is not None: + config.set('files_for_e3sm', 'with_ice_shelf_cavities', + f'{mesh.with_ice_shelf_cavities}') + + mesh_path = mesh.get_cull_mesh_path() + graph_filename = os.path.join( + self.base_work_dir, mesh_path, 
'culled_graph.info') + graph_filename = os.path.abspath(graph_filename) + config.set('files_for_e3sm', 'graph_filename', graph_filename) + + if dynamic_adjustment is not None: + restart_filename = os.path.join( + work_dir, '..', 'dynamic_adjustment', + dynamic_adjustment.restart_filenames[-1]) + restart_filename = os.path.abspath(restart_filename) + config.set('files_for_e3sm', 'ocean_restart_filename', + restart_filename) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_maps.py b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_maps.py new file mode 100644 index 0000000000..a1ed001d3e --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_maps.py @@ -0,0 +1,227 @@ +import glob +import os + +import numpy +import pyproj +from pyremap import ( + MpasMeshDescriptor, + ProjectionGridDescriptor, + Remapper, + get_lat_lon_descriptor, +) + +from compass.io import symlink +from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 + FilesForE3SMStep, +) + + +class DiagnosticMaps(FilesForE3SMStep): + """ + A step for creating mapping files for use in MPAS-Analysis + """ + + def __init__(self, test_case): + """ + Create a step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + """ # noqa: E501 + + super().__init__(test_case, name='diagnostics_maps', ntasks=36, + min_tasks=1) + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. + + def run(self): + """ + Run this step of the testcase + """ + super().run() + + make_diagnostics_maps(self.config, self.logger, self.mesh_short_name, + self.ntasks) + + +def make_diagnostics_maps(config, logger, mesh_short_name, ntasks): + """ + Run this step of the testcase + + Parameters + ---------- + config : compass.config.CompassConfigParser + Configuration options for this test case + + logger : logging.Logger + A logger for output from the step + + mesh_short_name : str + The E3SM short name of the mesh + + ntasks : int + The number of cores to use to build mapping files + """ + link_dir = '../assembled_files/diagnostics/mpas_analysis/maps' + + try: + os.makedirs(link_dir) + except OSError: + pass + + _make_analysis_lat_lon_map(config, mesh_short_name, ntasks, logger) + for projection_name in ['antarctic', 'arctic', 'antarctic_extended', + 'arctic_extended', 'north_atlantic', + 'north_pacific', 'subpolar_north_atlantic']: + _make_analysis_projection_map(config, mesh_short_name, projection_name, + ntasks, logger) + + # make links in output directory + files = glob.glob('map_*') + + # make links in output directory + for filename in files: + symlink(os.path.abspath(filename), + f'{link_dir}/{filename}') + + +def _make_analysis_lat_lon_map(config, mesh_name, ntasks, logger): + mesh_filename = 'restart.nc' + + lat_res = config.getfloat('files_for_e3sm', 'comparisonLatResolution') + lon_res = config.getfloat('files_for_e3sm', 'comparisonLonResolution') + + # modify the resolution of the global lat-lon grid as desired + out_descriptor = get_lat_lon_descriptor(dLon=lat_res, + dLat=lon_res) + out_grid_name = out_descriptor.meshName + + _make_mapping_file(mesh_name, out_grid_name, mesh_filename, out_descriptor, + ntasks, config, logger) + + +# copied from MPAS-Analysis for now +def _get_pyproj_projection(comparison_grid_name): + """ + Get 
the projection from the comparison_grid_name. + Parameters + ---------- + comparison_grid_name : str + The name of the projection comparison grid to use for remapping + Returns + ------- + projection : pyproj.Proj + The projection + Raises + ------ + ValueError + If comparison_grid_name does not describe a known comparison grid + """ + + if comparison_grid_name == 'latlon': + raise ValueError('latlon is not a projection grid.') + elif comparison_grid_name in ['antarctic', 'antarctic_extended']: + projection = pyproj.Proj( + '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 ' + '+x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name in ['arctic', 'arctic_extended']: + projection = pyproj.Proj( + '+proj=stere +lat_ts=75.0 +lat_0=90 +lon_0=0.0 +k_0=1.0 ' + '+x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'north_atlantic': + projection = pyproj.Proj('+proj=lcc +lon_0=-45 +lat_0=45 +lat_1=39 ' + '+lat_2=51 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'north_pacific': + projection = pyproj.Proj('+proj=lcc +lon_0=180 +lat_0=40 +lat_1=34 ' + '+lat_2=46 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'subpolar_north_atlantic': + projection = pyproj.Proj('+proj=lcc +lon_0=-40 +lat_0=54 +lat_1=40 ' + '+lat_2=68 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + else: + raise ValueError(f'We missed one of the known comparison grids: ' + f'{comparison_grid_name}') + + return projection + + +# A lot of duplication from MPAS-Analysis for now. +def _make_analysis_projection_map(config, mesh_name, projection_name, ntasks, + logger): + mesh_filename = 'restart.nc' + section = 'files_for_e3sm' + + option_suffixes = {'antarctic': 'AntarcticStereo', + 'arctic': 'ArcticStereo', + 'antarctic_extended': 'AntarcticExtended', + 'arctic_extended': 'ArcticExtended', + 'north_atlantic': 'NorthAtlantic', + 'north_pacific': 'NorthPacific', + 'subpolar_north_atlantic': 'SubpolarNorthAtlantic'} + + grid_suffixes = {'antarctic': 'Antarctic_stereo', + 'arctic': 'Arctic_stereo', + 'antarctic_extended': 'Antarctic_stereo', + 'arctic_extended': 'Arctic_stereo', + 'north_atlantic': 'North_Atlantic', + 'north_pacific': 'North_Pacific', + 'subpolar_north_atlantic': 'Subpolar_North_Atlantic'} + + projection = _get_pyproj_projection(projection_name) + option_suffix = option_suffixes[projection_name] + grid_suffix = grid_suffixes[projection_name] + + width = config.getfloat( + section, f'comparison{option_suffix}Width') + option = f'comparison{option_suffix}Height' + if config.has_option(section, option): + height = config.getfloat(section, option) + else: + height = width + res = config.getfloat( + section, f'comparison{option_suffix}Resolution') + + xmax = 0.5 * width * 1e3 + nx = int(width / res) + 1 + x = numpy.linspace(-xmax, xmax, nx) + + ymax = 0.5 * height * 1e3 + ny = int(height / res) + 1 + y = numpy.linspace(-ymax, ymax, ny) + + out_grid_name = f'{width}x{height}km_{res}km_{grid_suffix}' + out_descriptor = ProjectionGridDescriptor.create(projection, x, y, + mesh_name) + + _make_mapping_file(mesh_name, out_grid_name, mesh_filename, out_descriptor, + ntasks, config, logger) + + +def _make_mapping_file(mesh_name, out_grid_name, mesh_filename, out_descriptor, + ntasks, config, logger): + + parallel_executable = config.get('parallel', 'parallel_executable') + + in_descriptor = MpasMeshDescriptor(mesh_filename, mesh_name) + + mapping_file_name = f'map_{mesh_name}_to_{out_grid_name}_bilinear.nc' + + remapper = Remapper(in_descriptor, out_descriptor, mapping_file_name) + + 
remapper.build_mapping_file(method='bilinear', mpiTasks=ntasks, + tempdir='.', logger=logger, + esmf_parallel_exec=parallel_executable) + + # now the same on vertices (e.g. for streamfunctions) + in_descriptor = MpasMeshDescriptor(mesh_filename, mesh_name, vertices=True) + mapping_file_name = \ + f'map_{mesh_name}_vertices_to_{out_grid_name}_bilinear.nc' + + remapper = Remapper(in_descriptor, out_descriptor, mapping_file_name) + + remapper.build_mapping_file(method='bilinear', mpiTasks=ntasks, + tempdir='.', logger=logger, + esmf_parallel_exec=parallel_executable) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_masks.py b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_masks.py new file mode 100644 index 0000000000..2ad66baf05 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostic_masks.py @@ -0,0 +1,213 @@ +import os + +import mpas_tools.io +import xarray as xr +from geometric_features import GeometricFeatures +from geometric_features.aggregation import get_aggregator_by_name +from mpas_tools.io import write_netcdf +from mpas_tools.logging import check_call +from mpas_tools.ocean.moc import add_moc_southern_boundary_transects + +from compass.io import symlink +from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 + FilesForE3SMStep, +) + + +class DiagnosticMasks(FilesForE3SMStep): + """ + A step for creating region masks needed for the Meridional Overturning + Circulation analysis member and diagnostics from MPAS-Analysis + """ + + def __init__(self, test_case): + """ + Create a step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + """ # noqa: E501 + + super().__init__(test_case, name='diagnostics_masks', cpus_per_task=18, + min_cpus_per_task=1) + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. 
+ + def run(self): + """ + Run this step of the testcase + """ + super().run() + + make_diagnostics_files(self.logger, self.mesh_short_name, + self.with_ice_shelf_cavities, + self.cpus_per_task) + + +def make_diagnostics_files(logger, mesh_short_name, with_ice_shelf_cavities, + cpus_per_task): + """ + Run this step of the testcase + + Parameters + ---------- + logger : logging.Logger + A logger for output from the step + + mesh_short_name : str + The E3SM short name of the mesh + + with_ice_shelf_cavities : bool + Whether the mesh has ice-shelf cavities + + cpus_per_task : int + The number of cores to use to build masks + """ + + mask_dir = '../assembled_files/diagnostics/mpas_analysis/region_masks' + try: + os.makedirs(mask_dir) + except FileExistsError: + pass + + ocean_inputdata_dir = \ + f'../assembled_files/inputdata/ocn/mpas-o/{mesh_short_name}' + moc_mask_dirs = [mask_dir, ocean_inputdata_dir] + + _make_moc_masks(mesh_short_name, logger, cpus_per_task, moc_mask_dirs) + + gf = GeometricFeatures() + region_groups = ['Antarctic Regions', 'Arctic Ocean Regions', + 'Arctic Sea Ice Regions', 'Ocean Basins', + 'Ocean Subbasins', 'ISMIP6 Regions'] + + if with_ice_shelf_cavities: + region_groups.append('Ice Shelves') + + for region_group in region_groups: + function, prefix, date = get_aggregator_by_name(region_group) + suffix = f'{prefix}{date}' + fc_mask = function(gf) + _make_region_masks(mesh_short_name, suffix=suffix, fc_mask=fc_mask, + logger=logger, cpus_per_task=cpus_per_task, + output_dir=mask_dir) + + transect_groups = ['Transport Transects'] + for transect_group in transect_groups: + function, prefix, date = get_aggregator_by_name(transect_group) + suffix = f'{prefix}{date}' + fc_mask = function(gf) + _make_transect_masks(mesh_short_name, suffix=suffix, fc_mask=fc_mask, + logger=logger, cpus_per_task=cpus_per_task, + output_dir=mask_dir) + + +def _make_region_masks(mesh_name, suffix, fc_mask, logger, cpus_per_task, + output_dir): + mesh_filename = 'restart.nc' + + geojson_filename = f'{suffix}.geojson' + mask_filename = f'{mesh_name}_{suffix}.nc' + + fc_mask.to_geojson(geojson_filename) + + # these defaults may have been updated from config options -- pass them + # along to the subprocess + netcdf_format = mpas_tools.io.default_format + netcdf_engine = mpas_tools.io.default_engine + + args = ['compute_mpas_region_masks', + '-m', mesh_filename, + '-g', geojson_filename, + '-o', mask_filename, + '-t', 'cell', + '--process_count', f'{cpus_per_task}', + '--format', netcdf_format, + '--engine', netcdf_engine] + check_call(args, logger=logger) + + # make links in output directory + symlink(os.path.abspath(mask_filename), + f'{output_dir}/{mask_filename}') + + +def _make_transect_masks(mesh_name, suffix, fc_mask, logger, cpus_per_task, + output_dir, subdivision_threshold=10e3): + mesh_filename = 'restart.nc' + + geojson_filename = f'{suffix}.geojson' + mask_filename = f'{mesh_name}_{suffix}.nc' + + fc_mask.to_geojson(geojson_filename) + + # these defaults may have been updated from config options -- pass them + # along to the subprocess + netcdf_format = mpas_tools.io.default_format + netcdf_engine = mpas_tools.io.default_engine + + args = ['compute_mpas_transect_masks', + '-m', mesh_filename, + '-g', geojson_filename, + '-o', mask_filename, + '-t', 'edge', + '-s', f'{subdivision_threshold}', + '--process_count', f'{cpus_per_task}', + '--add_edge_sign', + '--format', netcdf_format, + '--engine', netcdf_engine] + check_call(args, logger=logger) + + symlink(os.path.abspath(mask_filename), 
+ f'{output_dir}/{mask_filename}') + + +def _make_moc_masks(mesh_short_name, logger, cpus_per_task, moc_mask_dirs): + gf = GeometricFeatures() + + mesh_filename = 'restart.nc' + + function, prefix, date = get_aggregator_by_name('MOC Basins') + fc_mask = function(gf) + + suffix = f'{prefix}{date}' + + geojson_filename = f'{suffix}.geojson' + mask_filename = f'{mesh_short_name}_{suffix}.nc' + + fc_mask.to_geojson(geojson_filename) + + # these defaults may have been updated from config options -- pass them + # along to the subprocess + netcdf_format = mpas_tools.io.default_format + netcdf_engine = mpas_tools.io.default_engine + + args = ['compute_mpas_region_masks', + '-m', mesh_filename, + '-g', geojson_filename, + '-o', mask_filename, + '-t', 'cell', + '--process_count', f'{cpus_per_task}', + '--format', netcdf_format, + '--engine', netcdf_engine] + check_call(args, logger=logger) + + mask_and_transect_filename = \ + f'{mesh_short_name}_mocBasinsAndTransects{date}.nc' + + ds_mesh = xr.open_dataset(mesh_filename) + ds_mask = xr.open_dataset(mask_filename) + + ds_masks_and_transects = add_moc_southern_boundary_transects( + ds_mask, ds_mesh, logger=logger) + + write_netcdf(ds_masks_and_transects, mask_and_transect_filename, + char_dim_name='StrLen') + + # make links in output directories (both inputdata and diagnostics) + for output_dir in moc_mask_dirs: + symlink(os.path.abspath(mask_and_transect_filename), + f'{output_dir}/{mask_and_transect_filename}') diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py deleted file mode 100644 index 2c9962953b..0000000000 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py +++ /dev/null @@ -1,390 +0,0 @@ -import os -import xarray -import glob -import pyproj -import numpy - -from pyremap import get_lat_lon_descriptor, ProjectionGridDescriptor, \ - MpasMeshDescriptor, Remapper -from geometric_features import GeometricFeatures -from geometric_features.aggregation import get_aggregator_by_name -from mpas_tools.logging import check_call -from mpas_tools.ocean.moc import add_moc_southern_boundary_transects -from mpas_tools.io import write_netcdf -import mpas_tools.io - -from compass.io import symlink -from compass.step import Step - - -class DiagnosticsFiles(Step): - """ - A step for creating files needed for the Meridional Overturning Circulation - analysis member and diagnostics from MPAS-Analysis - - Attributes - ---------- - with_ice_shelf_cavities : bool - Whether the mesh includes ice-shelf cavities - """ - - def __init__(self, test_case, restart_filename, with_ice_shelf_cavities): - """ - Create a step - - Parameters - ---------- - test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM - The test case this step belongs to - - restart_filename : str - A restart file from the end of the dynamic adjustment test case to - use as the basis for an E3SM initial condition - - with_ice_shelf_cavities : bool - Whether the mesh includes ice-shelf cavities - """ - - super().__init__(test_case, name='diagnostics_files', cpus_per_task=18, - min_cpus_per_task=1, openmp_threads=1) - - self.add_input_file(filename='README', target='../README') - self.add_input_file(filename='restart.nc', - target=f'../{restart_filename}') - - self.with_ice_shelf_cavities = with_ice_shelf_cavities - - # for now, we won't define any outputs because they include the mesh - # short name, which is not known at setup time. 
Currently, this is - # safe because no other steps depend on the outputs of this one. - - def run(self): - """ - Run this step of the testcase - """ - with xarray.open_dataset('restart.nc') as ds: - mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] - - make_diagnostics_files(self.config, self.logger, mesh_short_name, - self.with_ice_shelf_cavities, - self.cpus_per_task) - - -def make_diagnostics_files(config, logger, mesh_short_name, - with_ice_shelf_cavities, cores): - """ - Run this step of the testcase - - Parameters - ---------- - config : compass.config.CompassConfigParser - Configuration options for this test case - - logger : logging.Logger - A logger for output from the step - - mesh_short_name : str - The E3SM short name of the mesh - - with_ice_shelf_cavities : bool - Whether the mesh has ice-shelf cavities - - cores : int - The number of cores to use to build mapping files - """ - - for directory in [ - f'../assembled_files/inputdata/ocn/mpas-o/{mesh_short_name}', - '../assembled_files/diagnostics/mpas_analysis/region_masks', - '../assembled_files/diagnostics/mpas_analysis/maps']: - try: - os.makedirs(directory) - except OSError: - pass - _make_moc_masks(mesh_short_name, logger, cores) - - gf = GeometricFeatures() - region_groups = ['Antarctic Regions', 'Arctic Ocean Regions', - 'Arctic Sea Ice Regions', 'Ocean Basins', - 'Ocean Subbasins', 'ISMIP6 Regions'] - - if with_ice_shelf_cavities: - region_groups.append('Ice Shelves') - - for region_group in region_groups: - function, prefix, date = get_aggregator_by_name(region_group) - suffix = f'{prefix}{date}' - fc_mask = function(gf) - _make_region_masks(mesh_short_name, suffix=suffix, fc_mask=fc_mask, - logger=logger, cores=cores) - - transect_groups = ['Transport Transects'] - for transect_group in transect_groups: - function, prefix, date = get_aggregator_by_name(transect_group) - suffix = f'{prefix}{date}' - fc_mask = function(gf) - _make_transect_masks(mesh_short_name, suffix=suffix, fc_mask=fc_mask, - logger=logger, cores=cores) - - _make_analysis_lat_lon_map(config, mesh_short_name, cores, logger) - for projection_name in ['antarctic', 'arctic', 'antarctic_extended', - 'arctic_extended', 'north_atlantic', - 'north_pacific', 'subpolar_north_atlantic']: - _make_analysis_projection_map(config, mesh_short_name, projection_name, - cores, logger) - - # make links in output directory - files = glob.glob('map_*') - - # make links in output directory - output_dir = '../assembled_files/diagnostics/mpas_analysis/maps' - for filename in files: - symlink(f'../../../../diagnostics_files/{filename}', - f'{output_dir}/{filename}') - - -def _make_region_masks(mesh_name, suffix, fc_mask, logger, cores): - mesh_filename = 'restart.nc' - - geojson_filename = f'{suffix}.geojson' - mask_filename = f'{mesh_name}_{suffix}.nc' - - fc_mask.to_geojson(geojson_filename) - - # these defaults may have been updated from config options -- pass them - # along to the subprocess - netcdf_format = mpas_tools.io.default_format - netcdf_engine = mpas_tools.io.default_engine - - args = ['compute_mpas_region_masks', - '-m', mesh_filename, - '-g', geojson_filename, - '-o', mask_filename, - '-t', 'cell', - '--process_count', f'{cores}', - '--format', netcdf_format, - '--engine', netcdf_engine] - check_call(args, logger=logger) - - # make links in output directory - output_dir = '../assembled_files/diagnostics/mpas_analysis/' \ - 'region_masks' - symlink(f'../../../../diagnostics_files/{mask_filename}', - f'{output_dir}/{mask_filename}') - - -def 
_make_transect_masks(mesh_name, suffix, fc_mask, logger, cores, - subdivision_threshold=10e3): - mesh_filename = 'restart.nc' - - geojson_filename = f'{suffix}.geojson' - mask_filename = f'{mesh_name}_{suffix}.nc' - - fc_mask.to_geojson(geojson_filename) - - # these defaults may have been updated from config options -- pass them - # along to the subprocess - netcdf_format = mpas_tools.io.default_format - netcdf_engine = mpas_tools.io.default_engine - - args = ['compute_mpas_transect_masks', - '-m', mesh_filename, - '-g', geojson_filename, - '-o', mask_filename, - '-t', 'edge', - '-s', f'{subdivision_threshold}', - '--process_count', f'{cores}', - '--add_edge_sign', - '--format', netcdf_format, - '--engine', netcdf_engine] - check_call(args, logger=logger) - - # make links in output directory - output_dir = '../assembled_files/diagnostics/mpas_analysis/' \ - 'region_masks' - symlink(f'../../../../diagnostics_files/{mask_filename}', - f'{output_dir}/{mask_filename}') - - -def _make_analysis_lat_lon_map(config, mesh_name, cores, logger): - mesh_filename = 'restart.nc' - - in_descriptor = MpasMeshDescriptor(mesh_filename, mesh_name) - - lat_res = config.getfloat('files_for_e3sm', 'comparisonLatResolution') - lon_res = config.getfloat('files_for_e3sm', 'comparisonLonResolution') - - # modify the resolution of the global lat-lon grid as desired - out_descriptor = get_lat_lon_descriptor(dLon=lat_res, - dLat=lon_res) - out_grid_name = out_descriptor.meshName - - _make_mapping_file(mesh_name, out_grid_name, in_descriptor, out_descriptor, - cores, config, logger) - - -# copied from MPAS-Analysis for now -def _get_pyproj_projection(comparison_grid_name): - """ - Get the projection from the comparison_grid_name. - Parameters - ---------- - comparison_grid_name : str - The name of the projection comparison grid to use for remapping - Returns - ------- - projection : pyproj.Proj - The projection - Raises - ------ - ValueError - If comparison_grid_name does not describe a known comparison grid - """ - - if comparison_grid_name == 'latlon': - raise ValueError('latlon is not a projection grid.') - elif comparison_grid_name in ['antarctic', 'antarctic_extended']: - projection = pyproj.Proj( - '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 ' - '+x_0=0.0 +y_0=0.0 +ellps=WGS84') - elif comparison_grid_name in ['arctic', 'arctic_extended']: - projection = pyproj.Proj( - '+proj=stere +lat_ts=75.0 +lat_0=90 +lon_0=0.0 +k_0=1.0 ' - '+x_0=0.0 +y_0=0.0 +ellps=WGS84') - elif comparison_grid_name == 'north_atlantic': - projection = pyproj.Proj('+proj=lcc +lon_0=-45 +lat_0=45 +lat_1=39 ' - '+lat_2=51 +x_0=0.0 +y_0=0.0 +ellps=WGS84') - elif comparison_grid_name == 'north_pacific': - projection = pyproj.Proj('+proj=lcc +lon_0=180 +lat_0=40 +lat_1=34 ' - '+lat_2=46 +x_0=0.0 +y_0=0.0 +ellps=WGS84') - elif comparison_grid_name == 'subpolar_north_atlantic': - projection = pyproj.Proj('+proj=lcc +lon_0=-40 +lat_0=54 +lat_1=40 ' - '+lat_2=68 +x_0=0.0 +y_0=0.0 +ellps=WGS84') - else: - raise ValueError(f'We missed one of the known comparison grids: ' - f'{comparison_grid_name}') - - return projection - - -# A lot of duplication from MPAS-Analysis for now. 
-def _make_analysis_projection_map(config, mesh_name, projection_name, cores, - logger): - mesh_filename = 'restart.nc' - section = 'files_for_e3sm' - - option_suffixes = {'antarctic': 'AntarcticStereo', - 'arctic': 'ArcticStereo', - 'antarctic_extended': 'AntarcticExtended', - 'arctic_extended': 'ArcticExtended', - 'north_atlantic': 'NorthAtlantic', - 'north_pacific': 'NorthPacific', - 'subpolar_north_atlantic': 'SubpolarNorthAtlantic'} - - grid_suffixes = {'antarctic': 'Antarctic_stereo', - 'arctic': 'Arctic_stereo', - 'antarctic_extended': 'Antarctic_stereo', - 'arctic_extended': 'Arctic_stereo', - 'north_atlantic': 'North_Atlantic', - 'north_pacific': 'North_Pacific', - 'subpolar_north_atlantic': 'Subpolar_North_Atlantic'} - - projection = _get_pyproj_projection(projection_name) - option_suffix = option_suffixes[projection_name] - grid_suffix = grid_suffixes[projection_name] - - in_descriptor = MpasMeshDescriptor(mesh_filename, mesh_name) - - width = config.getfloat( - section, f'comparison{option_suffix}Width') - option = f'comparison{option_suffix}Height' - if config.has_option(section, option): - height = config.getfloat(section, option) - else: - height = width - res = config.getfloat( - section, f'comparison{option_suffix}Resolution') - - xmax = 0.5 * width * 1e3 - nx = int(width / res) + 1 - x = numpy.linspace(-xmax, xmax, nx) - - ymax = 0.5 * height * 1e3 - ny = int(height / res) + 1 - y = numpy.linspace(-ymax, ymax, ny) - - out_grid_name = f'{width}x{height}km_{res}km_{grid_suffix}' - out_descriptor = ProjectionGridDescriptor.create(projection, x, y, - mesh_name) - - _make_mapping_file(mesh_name, out_grid_name, in_descriptor, out_descriptor, - cores, config, logger) - - -def _make_mapping_file(mesh_name, out_grid_name, in_descriptor, out_descriptor, - cores, config, logger): - - parallel_executable = config.get('parallel', 'parallel_executable') - - mapping_file_name = f'map_{mesh_name}_to_{out_grid_name}_bilinear.nc' - - remapper = Remapper(in_descriptor, out_descriptor, mapping_file_name) - - remapper.build_mapping_file(method='bilinear', mpiTasks=cores, tempdir='.', - logger=logger, - esmf_parallel_exec=parallel_executable) - - -def _make_moc_masks(mesh_short_name, logger, cores): - gf = GeometricFeatures() - - mesh_filename = 'restart.nc' - - function, prefix, date = get_aggregator_by_name('MOC Basins') - fc_mask = function(gf) - - suffix = f'{prefix}{date}' - - geojson_filename = f'{suffix}.geojson' - mask_filename = f'{mesh_short_name}_{suffix}.nc' - - fc_mask.to_geojson(geojson_filename) - - # these defaults may have been updated from config options -- pass them - # along to the subprocess - netcdf_format = mpas_tools.io.default_format - netcdf_engine = mpas_tools.io.default_engine - - args = ['compute_mpas_region_masks', - '-m', mesh_filename, - '-g', geojson_filename, - '-o', mask_filename, - '-t', 'cell', - '--process_count', f'{cores}', - '--format', netcdf_format, - '--engine', netcdf_engine] - check_call(args, logger=logger) - - mask_and_transect_filename = \ - f'{mesh_short_name}_mocBasinsAndTransects{date}.nc' - - ds_mesh = xarray.open_dataset(mesh_filename) - ds_mask = xarray.open_dataset(mask_filename) - - ds_masks_and_transects = add_moc_southern_boundary_transects( - ds_mask, ds_mesh, logger=logger) - - write_netcdf(ds_masks_and_transects, mask_and_transect_filename, - char_dim_name='StrLen') - - # make links in output directories (both inputdata and diagnostics) - output_dir = f'../assembled_files/inputdata/ocn/mpas-o/{mesh_short_name}' - symlink( - 
f'../../../../../diagnostics_files/{mask_and_transect_filename}', - f'{output_dir}/{mask_and_transect_filename}') - - output_dir = '../assembled_files/diagnostics/mpas_analysis/' \ - 'region_masks' - symlink( - f'../../../../diagnostics_files/{mask_and_transect_filename}', - f'{output_dir}/{mask_and_transect_filename}') diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/e3sm_to_cmip_maps.py b/compass/ocean/tests/global_ocean/files_for_e3sm/e3sm_to_cmip_maps.py index 8d7ec31bc4..57c565a042 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/e3sm_to_cmip_maps.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/e3sm_to_cmip_maps.py @@ -1,18 +1,19 @@ import os -import xarray from mpas_tools.logging import check_call from compass.io import symlink -from compass.step import Step +from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 + FilesForE3SMStep, +) -class E3smToCmipMaps(Step): +class E3smToCmipMaps(FilesForE3SMStep): """ A step for creating mapping files from the MPAS-Ocean mesh to a standard CMIP6 mesh """ - def __init__(self, test_case, restart_filename): + def __init__(self, test_case): """ Create a new step @@ -20,18 +21,10 @@ def __init__(self, test_case, restart_filename): ---------- test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM The test case this step belongs to - - restart_filename : str - A restart file from the end of the dynamic adjustment test case to - use as the basis for an E3SM initial condition - """ + """ # noqa: E501 super().__init__(test_case, name='e3sm_to_cmip_maps', ntasks=36, - min_tasks=1, openmp_threads=1) - - self.add_input_file(filename='README', target='../README') - self.add_input_file(filename='restart.nc', - target=f'../{restart_filename}') + min_tasks=1) self.add_input_file(filename='ocean.scrip.nc', target='../scrip/ocean.scrip.nc') @@ -55,18 +48,13 @@ def run(self): """ Run this step of the testcase """ - with xarray.open_dataset('restart.nc') as ds: - mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] - mesh_prefix = ds.attrs['MPAS_Mesh_Prefix'] - prefix = f'MPAS_Mesh_{mesh_prefix}' - creation_date = ds.attrs[f'{prefix}_Version_Creation_Date'] - - make_e3sm_to_cmip_maps(self.config, self.logger, mesh_short_name, - creation_date, self.subdir, self.ntasks) + super().run() + make_e3sm_to_cmip_maps(self.config, self.logger, self.mesh_short_name, + self.creation_date, self.ntasks) def make_e3sm_to_cmip_maps(config, logger, mesh_short_name, creation_date, - subdir, ntasks): + ntasks): """ Make mapping file from the MPAS-Ocean mesh to the CMIP6 grid @@ -84,27 +72,23 @@ def make_e3sm_to_cmip_maps(config, logger, mesh_short_name, creation_date, creation_date : str The date to append to the mapping files - subdir : str - The subdirectory this function is run from, for symlinking into - ``assembled_files`` - ntasks : int The number of parallel tasks to use for remapping """ - link_dir = f'../assembled_files/diagnostics/maps' + link_dir = '../assembled_files/diagnostics/maps' try: os.makedirs(link_dir) - except OSError: + except FileExistsError: pass src_scrip_filename = 'ocean.scrip.nc' cmip6_grid_res = config.get('files_for_e3sm', 'cmip6_grid_res') if cmip6_grid_res == '180x360': - dst_scrip_filename = f'cmip6_180x360_scrip.20181001.nc' + dst_scrip_filename = 'cmip6_180x360_scrip.20181001.nc' elif cmip6_grid_res == '720x1440': - dst_scrip_filename = f'cmip6_720x1440_scrip.20181001.nc' + dst_scrip_filename = 'cmip6_720x1440_scrip.20181001.nc' else: raise 
ValueError(f'Unexpected cmip6_grid_res: {cmip6_grid_res}') @@ -132,7 +116,7 @@ def make_e3sm_to_cmip_maps(config, logger, mesh_short_name, creation_date, check_call(args, logger=logger) map_filename = \ - f'map_{mesh_short_name}_to_cmip6_{cmip6_grid_res}_{suffix}.{creation_date}.nc' + f'map_{mesh_short_name}_to_cmip6_{cmip6_grid_res}_{suffix}.{creation_date}.nc' # noqa: E501 - symlink(f'../../../{subdir}/{local_map_filename}', + symlink(os.path.abspath(local_map_filename), f'{link_dir}/{map_filename}') diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/files_for_e3sm_step.py b/compass/ocean/tests/global_ocean/files_for_e3sm/files_for_e3sm_step.py new file mode 100644 index 0000000000..26c50c9d02 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/files_for_e3sm_step.py @@ -0,0 +1,173 @@ +import os +from datetime import datetime + +import xarray as xr + +from compass.io import symlink +from compass.step import Step + + +class FilesForE3SMStep(Step): + """ + A superclass for steps in the FilesForE3SM test case + + Attributes + ---------- + mesh_short_name : str + The E3SM short name of the mesh + + creation_date : str + The creation date of the mesh in YYYYMMDD format + + ocean_inputdata_dir : str + The relative path to the ocean inputdata directory for this mesh + + seaice_inputdata_dir : str + The relative path to the sea-ice inputdata directory for this mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + def __init__(self, test_case, name, subdir=None, cpus_per_task=1, + min_cpus_per_task=1, ntasks=1, min_tasks=1,): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cpus_per_task : int, optional + the number of cores per task the step would ideally use. If + fewer cores per node are available on the system, the step will + run on all available cores as long as this is not below + ``min_cpus_per_task`` + + min_cpus_per_task : int, optional + the number of cores per task the step requires. If the system + has fewer than this number of cores per node, the step will fail + + ntasks : int, optional + the number of tasks the step would ideally use. If too few + cores are available on the system to accommodate the number of + tasks and the number of cores per task, the step will run on + fewer tasks as long as as this is not below ``min_tasks`` + + min_tasks : int, optional + the number of tasks the step requires. 
If the system has too + few cores to accommodate the number of tasks and cores per task, + the step will fail + + """ + super().__init__(test_case=test_case, name=name, subdir=subdir, + cpus_per_task=cpus_per_task, + min_cpus_per_task=min_cpus_per_task, ntasks=ntasks, + min_tasks=min_tasks) + + self.mesh_short_name = None + self.creation_date = None + self.ocean_inputdata_dir = None + self.seaice_inputdata_dir = None + self.with_ice_shelf_cavities = None + + def setup(self): + """ + setup input files based on config options + """ + self.add_input_file(filename='README', target='../README') + + restart_filename = self.config.get('files_for_e3sm', + 'ocean_restart_filename') + if restart_filename != 'autodetect': + self.add_input_file(filename='restart.nc', target=restart_filename) + + with_ice_shelf_cavities = self.config.get('files_for_e3sm', + 'with_ice_shelf_cavities') + if with_ice_shelf_cavities != 'autodetect': + self.with_ice_shelf_cavities = \ + (with_ice_shelf_cavities.lower() == 'true') + + def run(self): # noqa: C901 + """ + Run this step of the testcase + """ + config = self.config + if not os.path.exists('restart.nc'): + restart_filename = config.get('files_for_e3sm', + 'ocean_restart_filename') + if restart_filename == 'autodetect': + raise ValueError('No ocean restart file was provided in the ' + 'ocean_restart_filename config option.') + restart_filename = os.path.normpath(os.path.join( + self.test_case.work_dir, restart_filename)) + if not os.path.exists(restart_filename): + raise FileNotFoundError( + 'The ocean restart file given in ocean_restart_filename ' + 'could not be found.') + if restart_filename != 'restart.nc': + symlink(restart_filename, 'restart.nc') + + mesh_short_name = config.get('files_for_e3sm', 'mesh_short_name') + creation_date = config.get('global_ocean', 'creation_date') + with xr.open_dataset('restart.nc') as ds: + if 'MPAS_Mesh_Short_Name' in ds.attrs: + if mesh_short_name == 'autodetect': + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + if creation_date == 'autodetect': + # search for the creation date attribute + for attr in ds.attrs: + if attr.startswith('MPAS_Mesh') and \ + attr.endswith('Version_Creation_Date'): + creation_date = ds.attrs[attr] + # convert to the desired format + try: + date = datetime.strptime(creation_date, + '%m/%d/%Y %H:%M:%S') + creation_date = date.strftime("%Y%m%d") + except ValueError: + # creation date isn't in this old format, so + # assume it's already YYYYMMDD + pass + break + + if mesh_short_name == 'autodetect': + raise ValueError( + 'No mesh short name provided in "mesh_short_name" config ' + 'option and none found in MPAS_Mesh_Short_Name attribute.') + + if creation_date == 'autodetect': + now = datetime.now() + creation_date = now.strftime("%Y%m%d") + config.set('global_ocean', 'creation_date', creation_date) + + if self.with_ice_shelf_cavities is None: + with_ice_shelf_cavities = self.config.get( + 'files_for_e3sm', 'with_ice_shelf_cavities') + if with_ice_shelf_cavities == 'autodetect': + self.with_ice_shelf_cavities = 'wISC' in mesh_short_name + else: + self.with_ice_shelf_cavities = \ + (with_ice_shelf_cavities.lower() == 'true') + + self.mesh_short_name = mesh_short_name + self.creation_date = creation_date + + self.ocean_inputdata_dir = \ + f'../assembled_files/inputdata/ocn/mpas-o/{mesh_short_name}' + + self.seaice_inputdata_dir = \ + f'../assembled_files/inputdata/ice/mpas-seaice/{mesh_short_name}' + + for dest_dir in [self.ocean_inputdata_dir, self.seaice_inputdata_dir]: + try: + 
os.makedirs(dest_dir) + except FileExistsError: + pass diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/graph_partition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/graph_partition.py new file mode 100644 index 0000000000..a939faf873 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/graph_partition.py @@ -0,0 +1,40 @@ +import numpy as np + + +def get_core_list(ncells, max_cells_per_core=6000, min_cells_per_core=100): + """ + Get a fairly exhaustive list of core counts to partition a given number of + cells into + + Parameters + ---------- + ncells : int + The number of cells in the mesh + + max_cells_per_core : float, optional + the approximate maximum number of cells per core (use do determine + the minimum number of cores allowed) + + min_cells_per_core : float, optional + the approximate minimum number of cells per core (use do determine + the maximum number of cores allowed) + + Returns + ------- + cores : numpy.ndarray + Likely numbers of cores to run with + """ + min_graph_size = int(ncells / max_cells_per_core) + max_graph_size = int(ncells / min_cells_per_core) + n_power2 = 2**np.arange(1, 21) + n_multiples12 = 12 * np.arange(1, 9) + + cores = n_power2 + for power10 in range(3): + cores = np.concatenate([cores, 10**power10 * n_multiples12]) + + mask = np.logical_and(cores >= min_graph_size, + cores <= max_graph_size) + cores = cores[mask] + + return cores diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py index 93a200fb61..a239e0ee79 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py @@ -1,20 +1,23 @@ import os -import xarray -import numpy as np from glob import glob +import numpy as np from mpas_tools.logging import check_call from compass.io import symlink -from compass.step import Step +from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 + FilesForE3SMStep, +) +from compass.ocean.tests.global_ocean.files_for_e3sm.graph_partition import ( + get_core_list, +) -class OceanGraphPartition(Step): +class OceanGraphPartition(FilesForE3SMStep): """ - A step for creating an E3SM ocean initial condition from the results of - a dynamic-adjustment process to dissipate fast waves + A step for creating graph partition files for the ocean mesh """ - def __init__(self, test_case, mesh, restart_filename): + def __init__(self, test_case): """ Create a new step @@ -22,73 +25,58 @@ def __init__(self, test_case, mesh, restart_filename): ---------- test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM The test case this step belongs to + """ # noqa: E501 - mesh : compass.ocean.tests.global_ocean.mesh.Mesh - The test case that creates the mesh used by this test case - - restart_filename : str - A restart file from the end of the dynamic adjustment test case to - use as the basis for an E3SM initial condition - """ - - super().__init__(test_case, name='ocean_graph_partition', ntasks=1, - min_tasks=1, openmp_threads=1) - - self.add_input_file(filename='README', target='../README') - self.add_input_file(filename='restart.nc', - target='../{}'.format(restart_filename)) - - mesh_path = mesh.get_cull_mesh_path() - self.add_input_file( - filename='graph.info', - work_dir_target='{}/culled_graph.info'.format(mesh_path)) + super().__init__(test_case, name='ocean_graph_partition') # for now, we 
won't define any outputs because they include the mesh
         # short name, which is not known at setup time. Currently, this is
         # safe because no other steps depend on the outputs of this one.
 
+    def setup(self):
+        """
+        setup input files based on config options
+        """
+        super().setup()
+        graph_filename = self.config.get('files_for_e3sm', 'graph_filename')
+        if graph_filename != 'autodetect':
+            self.add_input_file(filename='graph.info', target=graph_filename)
+
     def run(self):
         """
         Run this step of the testcase
         """
+        super().run()
         logger = self.logger
-
-        with xarray.open_dataset('restart.nc') as ds:
-            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
-            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
-            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]
-
-        try:
-            os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
-                mesh_short_name))
-        except OSError:
-            pass
-
-        symlink('graph.info', 'mpas-o.graph.info.{}'.format(creation_date))
-
-        nCells = sum(1 for _ in open('graph.info'))
-        min_graph_size = int(nCells / 6000)
-        max_graph_size = int(nCells / 100)
-        logger.info('Creating graph files between {} and {}'.format(
-            min_graph_size, max_graph_size))
-        n_power2 = 2**np.arange(1, 21)
-        n_multiples12 = 12 * np.arange(1, 9)
-
-        n = n_power2
-        for power10 in range(3):
-            n = np.concatenate([n, 10**power10 * n_multiples12])
-
-        for index in range(len(n)):
-            if min_graph_size <= n[index] <= max_graph_size:
-                args = ['gpmetis', 'mpas-o.graph.info.{}'.format(creation_date),
-                        '{}'.format(n[index])]
-                check_call(args, logger)
+        config = self.config
+        creation_date = self.creation_date
+
+        if not os.path.exists('graph.info'):
+            graph_filename = config.get('files_for_e3sm', 'graph_filename')
+            if graph_filename == 'autodetect':
+                raise ValueError('No graph file was provided in the '
+                                 'graph_filename config option.')
+            graph_filename = os.path.normpath(os.path.join(
+                self.test_case.work_dir, graph_filename))
+            if not os.path.exists(graph_filename):
+                raise FileNotFoundError('The graph file given in '
+                                        'graph_filename could not be found.')
+            if graph_filename != 'graph.info':
+                symlink(graph_filename, 'graph.info')
+
+        symlink('graph.info', f'mpas-o.graph.info.{creation_date}')
+
+        ncells = sum(1 for _ in open('graph.info'))
+        cores = get_core_list(ncells=ncells)
+        logger.info(f'Creating graph files between {np.amin(cores)} and '
+                    f'{np.amax(cores)}')
+        for ncores in cores:
+            args = ['gpmetis', f'mpas-o.graph.info.{creation_date}',
+                    f'{ncores}']
+            check_call(args, logger)
 
         # create link in assembled files directory
         files = glob('mpas-o.graph.info.*')
-        dest_path = '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
-            mesh_short_name)
         for file in files:
-            symlink('../../../../../ocean_graph_partition/{}'.format(file),
-                    '{}/{}'.format(dest_path, file))
+            symlink(os.path.abspath(file),
+                    f'{self.ocean_inputdata_dir}/{file}')
diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py
index dd321e3b06..665db86731 100644
--- a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py
+++ b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py
@@ -1,18 +1,20 @@
 import os
-import xarray
 
+import xarray
 from mpas_tools.io import write_netcdf
 
 from compass.io import symlink
-from compass.step import Step
+from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import (  # noqa: E501
+    FilesForE3SMStep,
+)
 
 
-class OceanInitialCondition(Step):
+class OceanInitialCondition(FilesForE3SMStep):
     """
     A step for creating an E3SM ocean initial condition from the results of a
     dynamic-adjustment process to dissipate fast waves
     """
-    def __init__(self, test_case, restart_filename):
+    def __init__(self, test_case):
         """
         Create a new step
 
@@ -20,18 +22,9 @@ def __init__(self, test_case, restart_filename):
         ----------
         test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM
             The test case this step belongs to
+        """  # noqa: E501
 
-        restart_filename : str
-            A restart file from the end of the dynamic adjustment test case to
-            use as the basis for an E3SM initial condition
-        """
-
-        super().__init__(test_case, name='ocean_initial_condition', ntasks=1,
-                         min_tasks=1, openmp_threads=1)
-
-        self.add_input_file(filename='README', target='../README')
-        self.add_input_file(filename='restart.nc',
-                            target='../{}'.format(restart_filename))
+        super().__init__(test_case, name='ocean_initial_condition')
 
         # for now, we won't define any outputs because they include the mesh
         # short name, which is not known at setup time. Currently, this is
@@ -41,27 +34,16 @@ def run(self):
         """
         Run this step of the testcase
         """
-        with xarray.open_dataset('restart.nc') as ds:
-            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
-            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
-            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]
-
-        try:
-            os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
-                mesh_short_name))
-        except OSError:
-            pass
-
+        super().run()
         source_filename = 'restart.nc'
-        dest_filename = 'mpaso.{}.{}.nc'.format(mesh_short_name, creation_date)
+        dest_filename = f'mpaso.{self.mesh_short_name}.{self.creation_date}.nc'
 
         with xarray.open_dataset(source_filename) as ds:
             ds.load()
-            ds = ds.drop_vars('xtime')
+            if 'xtime' in ds.data_vars:
+                ds = ds.drop_vars('xtime')
             write_netcdf(ds, dest_filename)
 
         symlink(
-            '../../../../../ocean_initial_condition/{}'.format(dest_filename),
-            '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format(
-                mesh_short_name, dest_filename))
+            os.path.abspath(dest_filename),
+            f'{self.ocean_inputdata_dir}/{dest_filename}')
diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py b/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py
index a7526aeae5..7c0c4ee632 100644
--- a/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py
+++ b/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py
@@ -1,20 +1,18 @@
 import os
-import xarray
 
 from mpas_tools.scrip.from_mpas import scrip_from_mpas
 
 from compass.io import symlink
-from compass.step import Step
+from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import (  # noqa: E501
+    FilesForE3SMStep,
+)
 
 
-class Scrip(Step):
+class Scrip(FilesForE3SMStep):
     """
     A step for creating SCRIP files from the MPAS-Ocean mesh
-
-    with_ice_shelf_cavities : bool
-        Whether the mesh includes ice-shelf cavities
     """
-    def __init__(self, test_case, restart_filename, with_ice_shelf_cavities):
+    def __init__(self, test_case):
         """
         Create a new step
 
@@ -22,47 +20,28 @@ def __init__(self, test_case, restart_filename, with_ice_shelf_cavities):
         ----------
         test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM
             The test case this step belongs to
+        """  # noqa: E501
 
-        restart_filename : str
-            A restart file from the end of the dynamic adjustment test case to
-            use as the basis for an E3SM initial condition
+        super().__init__(test_case, name='scrip')
 
-        with_ice_shelf_cavities : bool
-            Whether the mesh includes ice-shelf cavities
+    def setup(self):
         """
-
-        super().__init__(test_case, name='scrip', ntasks=1,
-                         min_tasks=1, openmp_threads=1)
-
-        self.add_input_file(filename='README', target='../README')
-        self.add_input_file(filename='restart.nc',
-                            target=f'../{restart_filename}')
-
-        self.with_ice_shelf_cavities = with_ice_shelf_cavities
-
+        setup input files based on config options
+        """
+        super().setup()
         self.add_output_file(filename='ocean.scrip.nc')
-
-        if with_ice_shelf_cavities:
+        with_ice_shelf_cavities = self.with_ice_shelf_cavities
+        if with_ice_shelf_cavities is not None and with_ice_shelf_cavities:
             self.add_output_file(filename='ocean.mask.scrip.nc')
 
     def run(self):
         """
         Run this step of the testcase
         """
+        super().run()
         with_ice_shelf_cavities = self.with_ice_shelf_cavities
-
-        with xarray.open_dataset('restart.nc') as ds:
-            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
-            prefix = f'MPAS_Mesh_{mesh_prefix}'
-            creation_date = ds.attrs[f'{prefix}_Version_Creation_Date']
-
-        link_dir = f'../assembled_files/inputdata/ocn/mpas-o/{mesh_short_name}'
-
-        try:
-            os.makedirs(link_dir)
-        except OSError:
-            pass
+        mesh_short_name = self.mesh_short_name
+        creation_date = self.creation_date
 
         if with_ice_shelf_cavities:
             nomask_str = '.nomask'
@@ -75,8 +54,8 @@ def run(self):
 
         scrip_from_mpas('restart.nc', local_filename)
 
-        symlink(f'../../../../../scrip/{local_filename}',
-                f'{link_dir}/{scrip_filename}')
+        symlink(os.path.abspath(local_filename),
+                f'{self.ocean_inputdata_dir}/{scrip_filename}')
 
         if with_ice_shelf_cavities:
             local_filename = 'ocean.mask.scrip.nc'
@@ -85,5 +64,5 @@ def run(self):
             scrip_from_mpas('restart.nc', local_filename,
                             useLandIceMask=True)
 
-            symlink(f'../../../../../scrip/{local_filename}',
-                    f'{link_dir}/{scrip_mask_filename}')
+            symlink(os.path.abspath(local_filename),
+                    f'{self.ocean_inputdata_dir}/{scrip_mask_filename}')
diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_graph_partition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_graph_partition.py
new file mode 100644
index 0000000000..293ed3dc7e
--- /dev/null
+++ b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_graph_partition.py
@@ -0,0 +1,89 @@
+import os
+from glob import glob
+
+import numpy as np
+import xarray as xr
+from mpas_tools.logging import check_call
+
+from compass.io import symlink
+from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import (  # noqa: E501
+    FilesForE3SMStep,
+)
+from compass.ocean.tests.global_ocean.files_for_e3sm.graph_partition import (
+    get_core_list,
+)
+
+
+class SeaiceGraphPartition(FilesForE3SMStep):
+    """
+    A step for creating graph partition files for the sea-ice mesh
+    """
+    def __init__(self, test_case):
+        """
+        Create a new step
+
+        Parameters
+        ----------
+        test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM
+            The test case this step belongs to
+        """  # noqa: E501
+
+        super().__init__(test_case, name='seaice_graph_partition')
+
+        for filename in ['icePresent_QU60km_polar.nc',
+                         'seaice_QU60km_polar.nc']:
+            self.add_input_file(filename=filename,
+                                target=filename,
+                                database='partition',
+                                database_component='seaice')
+
+        # for now, we won't define any outputs because they include the mesh
+        # short name, which is not known at setup time. Currently, this is
+        # safe because no other steps depend on the outputs of this one.
+
+    def setup(self):
+        """
+        setup input files based on config options
+        """
+        super().setup()
+        graph_filename = self.config.get('files_for_e3sm', 'graph_filename')
+        if graph_filename != 'autodetect':
+            self.add_input_file(filename='graph.info', target=graph_filename)
+
+    def run(self):
+        """
+        Run this step of the testcase
+        """
+        super().run()
+        logger = self.logger
+        creation_date = self.creation_date
+
+        with xr.open_dataset('restart.nc') as ds:
+            ncells = ds.sizes['nCells']
+
+        cores = get_core_list(ncells=ncells)
+        logger.info(f'Creating graph files between {np.amin(cores)} and '
+                    f'{np.amax(cores)}')
+
+        args = ['prepare_seaice_partitions',
+                '-i', 'seaice_QU60km_polar.nc',
+                '-p', 'icePresent_QU60km_polar.nc',
+                '-m', 'restart.nc',
+                '-o', '.']
+        check_call(args, logger)
+
+        args = ['create_seaice_partitions',
+                '-m', 'restart.nc',
+                '-o', '.',
+                '-p', f'mpas-seaice.graph.info.{creation_date}',
+                '-g', 'gpmetis',
+                '--plotting',
+                '-n']
+        args = args + [f'{ncores}' for ncores in cores]
+        check_call(args, logger)
+
+        # create link in assembled files directory
+        files = glob('mpas-seaice.graph.info.*')
+        for file in files:
+            symlink(os.path.abspath(file),
+                    f'{self.seaice_inputdata_dir}/{file}')
diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py
index dae8625906..ddf1664e7b 100644
--- a/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py
+++ b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py
@@ -1,21 +1,20 @@
 import os
-import xarray
 
+import xarray
 from mpas_tools.io import write_netcdf
 
 from compass.io import symlink
-from compass.step import Step
+from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import (  # noqa: E501
+    FilesForE3SMStep,
+)
 
 
-class SeaiceInitialCondition(Step):
+class SeaiceInitialCondition(FilesForE3SMStep):
     """
     A step for creating an E3SM sea-ice initial condition from variables from
     an MPAS-Ocean restart file
-
-    with_ice_shelf_cavities : bool
-        Whether the mesh includes ice-shelf cavities
     """
-    def __init__(self, test_case, restart_filename, with_ice_shelf_cavities):
+    def __init__(self, test_case):
         """
         Create a new step
 
@@ -23,23 +22,9 @@ def __init__(self, test_case, restart_filename, with_ice_shelf_cavities):
         ----------
         test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM
             The test case this step belongs to
+        """  # noqa: E501
 
-        restart_filename : str
-            A restart file from the end of the dynamic adjustment test case to
-            use as the basis for an E3SM initial condition
-
-        with_ice_shelf_cavities : bool
-            Whether the mesh includes ice-shelf cavities
-        """
-
-        super().__init__(test_case, name='seaice_initial_condition', ntasks=1,
-                         min_tasks=1, openmp_threads=1)
-
-        self.add_input_file(filename='README', target='../README')
-        self.add_input_file(filename='restart.nc',
-                            target=f'../{restart_filename}')
-
-        self.with_ice_shelf_cavities = with_ice_shelf_cavities
+        super().__init__(test_case, name='seaice_initial_condition')
 
         # for now, we won't define any outputs because they include the mesh
         # short name, which is not known at setup time. Currently, this is
@@ -48,23 +33,11 @@ def __init__(self, test_case, restart_filename, with_ice_shelf_cavities):
     def run(self):
         """
         Run this step of the testcase
-        """
-        with_ice_shelf_cavities = self.with_ice_shelf_cavities
-
-        with xarray.open_dataset('restart.nc') as ds:
-            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
-            prefix = f'MPAS_Mesh_{mesh_prefix}'
-            creation_date = ds.attrs[f'{prefix}_Version_Creation_Date']
-
-        assembled_dir = f'../assembled_files/inputdata/ice/mpas-seaice/' \
-                        f'{mesh_short_name}'
-        try:
-            os.makedirs(assembled_dir)
-        except OSError:
-            pass
+        """
+        super().run()
 
-        dest_filename = f'mpassi.{mesh_short_name}.{creation_date}.nc'
+        dest_filename = \
+            f'mpassi.{self.mesh_short_name}.{self.creation_date}.nc'
 
         keep_vars = [
             'areaCell', 'cellsOnCell', 'edgesOnCell', 'fCell', 'indexToCellID',
@@ -77,7 +50,7 @@ def run(self):
             'fVertex', 'indexToVertexID', 'kiteAreasOnVertex', 'latVertex',
             'lonVertex', 'xVertex', 'yVertex', 'zVertex']
 
-        if with_ice_shelf_cavities:
+        if self.with_ice_shelf_cavities:
             keep_vars.append('landIceMask')
 
         with xarray.open_dataset('restart.nc') as ds:
@@ -85,5 +58,5 @@ def run(self):
             ds = ds[keep_vars]
             write_netcdf(ds, dest_filename)
 
-        symlink(f'../../../../../seaice_initial_condition/{dest_filename}',
-                f'{assembled_dir}/{dest_filename}')
+        symlink(os.path.abspath(dest_filename),
+                f'{self.seaice_inputdata_dir}/{dest_filename}')
diff --git a/compass/ocean/tests/global_ocean/global_ocean.cfg b/compass/ocean/tests/global_ocean/global_ocean.cfg
index 543518c11d..c0cd1aeaa2 100644
--- a/compass/ocean/tests/global_ocean/global_ocean.cfg
+++ b/compass/ocean/tests/global_ocean/global_ocean.cfg
@@ -83,19 +83,6 @@ temperature_max = 33.0
 # for E3SM
 [files_for_e3sm]
 
-# whether to generate an ocean initial condition in E3SM
-enable_ocean_initial_condition = true
-# whether to generate graph partitions for different numbers of ocean cores in
-# E3SM
-enable_ocean_graph_partition = true
-# whether to generate a sea-ice initial condition in E3SM
-enable_seaice_initial_condition = true
-# whether to generate SCRIP files for later use in creating E3SM mapping files
-enable_scrip = true
-# whether to generate region masks, transects and mapping files for use in both
-# online analysis members and offline with MPAS-Analysis
-enable_diagnostics_files = true
-
 ## the following relate to the comparison grids in MPAS-Analysis to generate
 ## mapping files for. The default values are also the defaults in
 ## MPAS-Analysis. Coarser or finer resolution may be desirable for some MPAS
@@ -140,3 +127,18 @@
 # CMIP6 grid resolution
 cmip6_grid_res = 180x360
+
+# the E3SM short name of the mesh or "autodetect" to use the
+# MPAS_Mesh_Short_Name attribute of the mesh file
+mesh_short_name = autodetect
+
+# the absolute path or relative path with respect to the test case's work
+# directory of an ocean restart file on the given mesh
+ocean_restart_filename = autodetect
+
+# the absolute path or relative path with respect to the test case's work
+# directory of a graph file that corresponds to the mesh
+graph_filename = autodetect
+
+# whether the mesh has ice-shelf cavities
+with_ice_shelf_cavities = autodetect
diff --git a/compass/ocean/tests/global_ocean/make_diagnostics_files/__init__.py b/compass/ocean/tests/global_ocean/make_diagnostics_files/__init__.py
deleted file mode 100644
index da036bc9b7..0000000000
--- a/compass/ocean/tests/global_ocean/make_diagnostics_files/__init__.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import os
-import xarray
-from datetime import datetime
-
-from mpas_tools.scrip.from_mpas import scrip_from_mpas
-
-from compass.io import symlink
-from compass.testcase import TestCase
-from compass.step import Step
-from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostics_files import \
-    make_diagnostics_files
-from compass.ocean.tests.global_ocean.files_for_e3sm.e3sm_to_cmip_maps import \
-    make_e3sm_to_cmip_maps
-
-
-class MakeDiagnosticsFiles(TestCase):
-    """
-    A test case for making diagnostics files (mapping files and region masks)
-    from an existing mesh.
-    """
-    def __init__(self, test_group):
-        """
-        Create the test case
-
-        Parameters
-        ----------
-        test_group : compass.ocean.tests.global_ocean.GlobalOcean
-            The global ocean test group that this test case belongs to
-        """
-        super().__init__(test_group=test_group, name='make_diagnostics_files')
-
-        self.add_step(E3smToCmipMaps(test_case=self))
-        self.add_step(DiagnosticsFiles(test_case=self))
-
-    def configure(self):
-        """
-        Modify the configuration options for this test case
-        """
-        self.config.add_from_package(
-            'compass.ocean.tests.global_ocean.make_diagnostics_files',
-            'make_diagnostics_files.cfg', exception=True)
-
-    def run(self):
-        """
-        Run each step of the testcase
-        """
-        cores = self.config.getint('make_diagnostics_files', 'cores')
-        self.steps['diagnostics_files'].cpus_per_task = cores
-        self.steps['e3sm_to_cmip_maps'].ntasks = cores
-
-        # run the step
-        super().run()
-
-
-class E3smToCmipMaps(Step):
-    """
-    A step for making e3sm_to_cmip mapping files
-    """
-    def __init__(self, test_case):
-        """
-        Create the step
-
-        Parameters
-        ----------
-        test_case : compass.ocean.tests.global_ocean.make_diagnostics_files.MakeDiagnosticsFiles
-            The test case this step belongs to
-        """
-        super().__init__(test_case=test_case, name='e3sm_to_cmip_maps')
-
-        # add both scrip files, since we don't know in advance which to use
-        self.add_input_file(
-            filename='cmip6_180x360_scrip.20181001.nc',
-            target='cmip6_180x360_scrip.20181001.nc',
-            database='map_database')
-
-        self.add_input_file(
-            filename='cmip6_720x1440_scrip.20181001.nc',
-            target='cmip6_720x1440_scrip.20181001.nc',
-            database='map_database')
-
-    def run(self):
-        """
-        Run this step of the test case
-        """
-
-        config = self.config
-        section = config['make_diagnostics_files']
-        mesh_filename = section.get('mesh_filename')
-
-        symlink(os.path.join('..', mesh_filename), 'restart.nc')
-
-        with xarray.open_dataset('restart.nc') as ds:
-            if 'MPAS_Mesh_Short_Name' in ds.attrs:
-                mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            else:
-                mesh_short_name = section.get('mesh_name')
-
-        creation_date = config.get('global_ocean', 'creation_date')
-        if creation_date == 'autodetect':
-            if 'MPAS_Mesh_Prefix' in ds.attrs:
-                mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
-                prefix = f'MPAS_Mesh_{mesh_prefix}'
-                creation_date = ds.attrs[f'{prefix}_Version_Creation_Date']
-            else:
-                now = datetime.now()
-                creation_date = now.strftime("%Y%m%d")
-
-        scrip_from_mpas('restart.nc', 'ocean.scrip.nc')
-
-        make_e3sm_to_cmip_maps(self.config, self.logger, mesh_short_name,
-                               creation_date, self.subdir, self.ntasks)
-
-
-class DiagnosticsFiles(Step):
-    """
-    A step for making diagnostics files (mapping files and region masks) from
-    an existing mesh.
-    """
-    def __init__(self, test_case):
-        """
-        Create the step
-
-        Parameters
-        ----------
-        test_case : compass.ocean.tests.global_ocean.make_diagnostics_files.MakeDiagnosticsFiles
-            The test case this step belongs to
-        """
-        super().__init__(test_case=test_case, name='diagnostics_files')
-
-    def run(self):
-        """
-        Run this step of the test case
-        """
-
-        config = self.config
-        section = config['make_diagnostics_files']
-
-        mesh_filename = section.get('mesh_filename')
-        with_ice_shelf_cavities = section.getboolean('with_ice_shelf_cavities')
-
-        symlink(os.path.join('..', mesh_filename), 'restart.nc')
-
-        with xarray.open_dataset('restart.nc') as ds:
-            if 'MPAS_Mesh_Short_Name' in ds.attrs:
-                mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
-            else:
-                mesh_short_name = section.get('mesh_name')
-
-        make_diagnostics_files(self.config, self.logger, mesh_short_name,
-                               with_ice_shelf_cavities, self.cpus_per_task)
diff --git a/compass/ocean/tests/global_ocean/make_diagnostics_files/make_diagnostics_files.cfg b/compass/ocean/tests/global_ocean/make_diagnostics_files/make_diagnostics_files.cfg
deleted file mode 100644
index 2de8ecc99f..0000000000
--- a/compass/ocean/tests/global_ocean/make_diagnostics_files/make_diagnostics_files.cfg
+++ /dev/null
@@ -1,16 +0,0 @@
-# config options related to diagnostics support files for an existing mesh
-[make_diagnostics_files]
-
-# the E3SM short name of the mesh if not in the MPAS_Mesh_Short_Name attribute
-# of the mesh file
-# mesh_name = EC30to60E2r3
-
-# the absolute path or relative path with respect to the test case's work
-# directory of a mesh file with the given short name
-mesh_filename = mesh.nc
-
-# the number of cores to use to make mapping files
-cores = 1
-
-# whether the mesh has ice-shelf cavities
-with_ice_shelf_cavities = False
diff --git a/docs/developers_guide/ocean/api.rst b/docs/developers_guide/ocean/api.rst
index 6e46d0087c..8d3a337130 100644
--- a/docs/developers_guide/ocean/api.rst
+++ b/docs/developers_guide/ocean/api.rst
@@ -173,8 +173,10 @@ test cases and steps
    files_for_e3sm.scrip.Scrip.run
    files_for_e3sm.seaice_initial_condition.SeaiceInitialCondition
    files_for_e3sm.seaice_initial_condition.SeaiceInitialCondition.run
-   files_for_e3sm.diagnostics_files.DiagnosticsFiles
-   files_for_e3sm.diagnostics_files.DiagnosticsFiles.run
+   files_for_e3sm.diagnostic_maps.DiagnosticMaps
+   files_for_e3sm.diagnostic_maps.DiagnosticMaps.run
+   files_for_e3sm.diagnostic_masks.DiagnosticMasks
+   files_for_e3sm.diagnostic_masks.DiagnosticMasks.run
 
    init.Init
    init.Init.configure
diff --git a/docs/developers_guide/ocean/test_groups/global_ocean.rst b/docs/developers_guide/ocean/test_groups/global_ocean.rst
index 5d68ca9315..0bdc9aaa1a 100644
--- a/docs/developers_guide/ocean/test_groups/global_ocean.rst
+++ b/docs/developers_guide/ocean/test_groups/global_ocean.rst
@@ -914,8 +914,19 @@ The test case is made up of 5 steps:
    Otherwise, only one file is symlinked, and it is named
    ``ocean..scrip..nc``
 
-:py:class:`compass.ocean.tests.global_ocean.files_for_e3sm.diagnostics_files.DiagnosticsFiles`
-   creates mapping files and regions masks for E3SM analysis members and
+:py:class:`compass.ocean.tests.global_ocean.files_for_e3sm.diagnostic_maps.DiagnosticMaps`
+   creates mapping files for
+   `MPAS-Analysis `_.
+
+   Mapping files are created from the MPAS-Ocean and -Seaice mesh to 7
+   standard comparison grids. Mapping files are created from both cells and
+   vertices on the MPAS mesh. The vertex maps are needed for quantities like
+   the barotropic streamfunction in MPAS-Ocean and ice speed in MPAS-Seaice.
+   The mapping files are symlinked in the directory
+   ``assembled_files/diagnostics/mpas_analysis/maps/``.
+
+:py:class:`compass.ocean.tests.global_ocean.files_for_e3sm.diagnostic_masks.DiagnosticMasks`
+   creates region masks for E3SM analysis members and
    `MPAS-Analysis `_.
 
    Region masks are created using
@@ -929,6 +940,8 @@ The test case is made up of 5 steps:
       'Ocean Subbasins', 'ISMIP6 Regions', 'Transport Transects']
 
+   If ice-shelf cavities are present in the mesh, the ``Ice Shelves``
+   regions are also included.
    The resulting region masks are symlinked in the directory
    ``assembled_files/diagnostics/mpas_analysis/region_masks/``
    and named ``_.nc``
@@ -938,11 +951,41 @@ The test case is made up of 5 steps:
    The resulting region mask is in the same directory as above, and named
    ``_moc_masks_and_transects.nc``
 
-   Mapping files are created from the MPAS-Ocean and -Seaice mesh to 3
-   standard comparison grids: a 0.5 x 0.5 degree longitude/latitude grid,
-   an Antarctic stereographic grid, and an Arctic stereographic grid.
-   The mapping files are symlinked in the directory
-   ``assembled_files/diagnostics/mpas_analysis/maps/``
-   and named ``map__to_0.5x0.5degree_bilinear.nc``,
-   ``map__to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc``,
-   and ``map__to_6000.0x6000.0km_10.0km_Arctic_stereo_bilinear.nc``.
+files_for_e3sm for an existing mesh
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The test case ``ocean/global_ocean/files_for_e3sm`` can be used to create all the
+same files as in :ref:`dev_ocean_global_ocean_files_for_e3sm` but for an
+existing mesh. To point to the existing mesh and associated graph file, the
+following config options must be specified (typically by editing
+``files_for_e3sm.cfg`` after setting up the test case):
+
+.. code-block:: ini
+
+    # config options related to initial condition and diagnostics support files
+    # for E3SM
+    [files_for_e3sm]
+
+    # the absolute path or relative path with respect to the test case's work
+    # directory of an ocean restart file on the given mesh
+    ocean_restart_filename = autodetect
+
+    # the absolute path or relative path with respect to the test case's work
+    # directory of a graph file that corresponds to the mesh
+    graph_filename = autodetect
+
+The following will be detected from the metadata in the ocean restart file if
+present but can be set if needed:
+
+.. code-block:: ini
+
+    # config options related to initial condition and diagnostics support files
+    # for E3SM
+    [files_for_e3sm]
+
+    # the E3SM short name of the mesh or "autodetect" to use the
+    # MPAS_Mesh_Short_Name attribute of the mesh file
+    mesh_short_name = autodetect
+
+    # whether the mesh has ice-shelf cavities
+    with_ice_shelf_cavities = autodetect
diff --git a/docs/users_guide/ocean/test_groups/global_ocean.rst b/docs/users_guide/ocean/test_groups/global_ocean.rst
index 7bf3aa525f..e10437b0d2 100644
--- a/docs/users_guide/ocean/test_groups/global_ocean.rst
+++ b/docs/users_guide/ocean/test_groups/global_ocean.rst
@@ -118,19 +118,6 @@ Note that meshes and test cases may modify these options, as noted below.
     # for E3SM
     [files_for_e3sm]
 
-    # whether to generate an ocean initial condition in E3SM
-    enable_ocean_initial_condition = true
-    # whether to generate graph partitions for different numbers of ocean cores in
-    # E3SM
-    enable_ocean_graph_partition = true
-    # whether to generate a sea-ice initial condition in E3SM
-    enable_seaice_initial_condition = true
-    # whether to generate SCRIP files for later use in creating E3SM mapping files
-    enable_scrip = true
-    # whether to generate region masks, transects and mapping files for use in both
-    # online analysis members and offline with MPAS-Analysis
-    enable_diagnostics_files = true
-
     ## the following relate to the comparison grids in MPAS-Analysis to generate
     ## mapping files for. The default values are also the defaults in
     ## MPAS-Analysis. Coarser or finer resolution may be desirable for some MPAS
@@ -148,6 +135,49 @@ Note that meshes and test cases may modify these options, as noted below.
     comparisonArcticStereoWidth = 6000.
     comparisonArcticStereoResolution = 10.
 
+    # The extended Antarctic polar stereographic comparison grid size and
+    # resolution in km
+    comparisonAntarcticExtendedWidth = 9000.
+    comparisonAntarcticExtendedResolution = 15.
+
+    # The extended Arctic polar stereographic comparison grid size and
+    # resolution in km
+    comparisonArcticExtendedWidth = 9000.
+    comparisonArcticExtendedResolution = 15.
+
+    # The comparison North Atlantic grid size and resolution in km
+    comparisonNorthAtlanticWidth = 8500.
+    comparisonNorthAtlanticHeight = 5500.
+    comparisonNorthAtlanticResolution = 20.
+
+    # The comparison North Pacific grid size and resolution in km
+    comparisonNorthPacificWidth = 15000.
+    comparisonNorthPacificHeight = 5000.
+    comparisonNorthPacificResolution = 20.
+
+    # The comparison subpolar North Atlantic grid size and resolution in km
+    comparisonSubpolarNorthAtlanticWidth = 7000.
+    comparisonSubpolarNorthAtlanticHeight = 4000.
+    comparisonSubpolarNorthAtlanticResolution = 20.
+
+    # CMIP6 grid resolution
+    cmip6_grid_res = 180x360
+
+    # the E3SM short name of the mesh or "autodetect" to use the
+    # MPAS_Mesh_Short_Name attribute of the mesh file
+    mesh_short_name = autodetect
+
+    # the absolute path or relative path with respect to the test case's work
+    # directory of an ocean restart file on the given mesh
+    ocean_restart_filename = autodetect
+
+    # the absolute path or relative path with respect to the test case's work
+    # directory of a graph file that corresponds to the mesh
+    graph_filename = autodetect
+
+    # whether the mesh has ice-shelf cavities
+    with_ice_shelf_cavities = autodetect
+
 The ``cull_mesh_*``, ``init_*`` and ``forward:*`` config options are used to
 specify the resources used in in the ``mesh`` step of the :ref:`global_ocean_mesh`,
 the ``initial_state`` step of the :ref:`global_ocean_init` and the
@@ -735,31 +765,33 @@ along with additional files required for full E3SM integration.
 Currently, there is not a way to use new meshes in E3SM without help from an
 expert from the E3SM team.
 
-.. _global_ocean_make_diagnostic_files:
+.. _global_ocean_files_for_e3sm_for_existing:
 
-make_diagnostic_files test case
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+files_for_e3sm for an existing mesh
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Sometimes, we already have an E3SM initial condition but the diagnostics files
-for `MPAS-Analysis `_
-either weren't created with the initial condition or they are out of date.
+Sometimes, we already have an E3SM initial condition but some of the support
+files, such as the diagnostics files for
+`MPAS-Analysis `_ or the
+graph partition files for MPAS-Seaice, either weren't created with the initial
+condition or they are out of date. The ``ocean/global_ocean/files_for_e3sm``
+test case is useful for creating these files.
 
 The user should create a local symlink to an E3SM initial condition for
-MPAS-Ocean ofr the desired mesh. Then, the config options in
-`make_diagnostics_files.cfg` should be edited. In this example, we have
-created a local link to the `ocean.WCAtl12to45E2r4.210318.nc` initial condition
-in the test case directory. The mesh name has also been set to the E3SM short
-name for this mesh `WCAtl12to45E2r4`. We use all 36 cores on a node (this
-test case can't use multiple nodes for most steps). We indicate that the mesh
-does not include ice-shelf cavities, which means we don't compute masks for
-ice-shelf melt rates.
+MPAS-Ocean for the desired mesh. Then, the config options in
+``files_for_e3sm.cfg`` should be edited. In this example, we have
+created a local link to the ``ocean.WCAtl12to45E2r4.210318.nc`` initial
+condition and the ``mpas-o.graph.info.210318`` graph file in the test case
+directory. The mesh name has also been set to the E3SM short name for this
+mesh ``WCAtl12to45E2r4``. We indicate that the mesh does not include ice-shelf
+cavities, which means we don't compute masks for ice-shelf melt rates.
 
 .. code-block:: cfg
 
-    [make_diagnostics_files]
-    mesh_name = WCAtl12to45E2r4
-    mesh_filename = ocean.WCAtl12to45E2r4.210318.nc
-    cores = 36
+    [files_for_e3sm]
+    mesh_short_name = WCAtl12to45E2r4
+    ocean_restart_filename = ocean.WCAtl12to45E2r4.210318.nc
+    graph_filename = mpas-o.graph.info.210318
     with_ice_shelf_cavities = False
 
 The resulting files are symlinked in a subdirectory of the test case called