# mdtf_tests.yml
# Source: forked from NOAA-GFDL/MDTF-diagnostics (GitHub Actions workflow).
---
# This file builds and runs a lightweight version of the MDTF test suite.
# Note that the tests assess functionality of the diagnostics,
# and do not evaluate data or scientific content.
name: MDTF_test

# NOTE: "on" is a YAML 1.1 truthy token when unquoted; GitHub's workflow
# parser accepts the bare key, so it is left plain per Actions convention.
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

defaults:
  run:
    # Login shell (-l) so the conda/micromamba shell hooks installed during
    # setup are sourced in every subsequent `run:` step.
    shell: bash -l {0}

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest]
        json-file: ["tests/github_actions_test_ubuntu_set1.jsonc", "tests/github_actions_test_macos_set1.jsonc"]
        json-file-set2: ["tests/github_actions_test_ubuntu_set2.jsonc", "tests/github_actions_test_macos_set2.jsonc"]
        json-file-set3: ["tests/github_actions_test_ubuntu_set3.jsonc", "tests/github_actions_test_macos_set3.jsonc"]
        # if experimental is true, other jobs to run if one fails
        experimental: [false]
        # The three json-file axes form a cross-product with `os`; the
        # excludes below drop every combination that pairs an OS with the
        # other OS's config files, leaving exactly one job per OS.
        exclude:
          - os: ubuntu-latest
            json-file: "tests/github_actions_test_macos_set1.jsonc"
          - os: ubuntu-latest
            json-file-set2: "tests/github_actions_test_macos_set2.jsonc"
          - os: ubuntu-latest
            json-file-set3: "tests/github_actions_test_macos_set3.jsonc"
          - os: macos-latest
            json-file: "tests/github_actions_test_ubuntu_set1.jsonc"
          - os: macos-latest
            json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc"
          - os: macos-latest
            json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc"
      # Limit concurrency so the shared NOAA-GFDL runner quota is not maxed out.
      max-parallel: 2
    steps:
      - uses: actions/checkout@v3
      # Set up Micromamba
      - uses: mamba-org/setup-micromamba@v1
        with:
          init-shell: bash
          condarc: |
            channels:
              - conda-forge
              - defaults
      - name: Initialize and verify miniconda installation
        run: |
          conda init bash
          conda info -a
      - name: Install XQuartz if macOS
        if: ${{ matrix.os == 'macos-latest' }}
        run: |
          echo "Installing XQuartz"
          brew install --cask xquartz
      - name: Set environment variables
        run: |
          echo "POD_OUTPUT=$(echo $PWD/../wkdir/GFDL.Synthetic)" >> $GITHUB_ENV
      - name: Install Conda Environments
        run: |
          echo "Installing Conda Environments"
          # MDTF-specific setup: install all conda envs
          micromamba create -f ./src/conda/env_base.yml
          micromamba create -f ./src/conda/env_python3_base.yml
          micromamba create -f ./src/conda/env_NCL_base.yml
          micromamba create -f ./src/conda/env_R_base.yml
          echo "Creating the _MDTF_synthetic_data environment"
          micromamba create -f ./src/conda/_env_synthetic_data.yml
      - name: Generate Model Data
        run: |
          cd ../
          micromamba activate _MDTF_synthetic_data
          pip install mdtf-test-data
          mkdir mdtf_test_data ; cd mdtf_test_data
          # generate the data and run unit tests
          mdtf_synthetic.py -c GFDL --startyear 1 --nyears 10 --unittest
          mdtf_synthetic.py -c NCAR --startyear 1975 --nyears 7
          mdtf_synthetic.py -c CMIP --startyear 1990 --nyears 20
          cd ../
          mkdir wkdir
      - name: Get Observational Data for Set 1
        run: |
          echo "${PWD}"
          cd ../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
          # jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar
          ## make input data directories
          mkdir -p inputdata/obs_data
          echo "Untarring set 1 NCAR/CESM standard test files"
          tar -xvf convective_transition_diag_obs_data.tar
          tar -xvf EOF_500hPa_obs_data.tar
          tar -xvf precip_diurnal_cycle_obs_data.tar
          tar -xvf MJO_teleconnection_obs_data.tar
          tar -xvf MJO_suite_obs_data.tar
          tar -xvf Wheeler_Kiladis_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 1
        run: |
          echo "POD_OUTPUT is: "
          echo "${POD_OUTPUT}"
          micromamba activate _MDTF_base
          # trivial check that install script worked
          ./mdtf_framework.py --version
          # run the test PODs
          ./mdtf_framework.py -v -f ${{ matrix.json-file }}
      - name: Get observational data for set 2
        run: |
          echo "${PWD}"
          # remove data from previous run
          # Actions moves you to the root repo directory in every step, so need to cd again
          cd ../inputdata/obs_data
          echo "deleting obs data from set 1"
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
          # jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_prop_amp_obs_data.tar --output MJO_prop_amp_obs_data.tar
          echo "Untarring set 2 GFDL standard test files"
          tar -xvf MJO_prop_amp_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 2
        run: |
          micromamba activate _MDTF_base
          # run the test PODs
          ./mdtf_framework.py -v -f ${{ matrix.json-file-set2 }}
          # Uncomment the following line for debugging
          # cat ../wkdir/MDTF_GFDL.Synthetic_1_10/MJO_prop_amp/MJO_prop_amp.log
      - name: Get observational data for set 3
        run: |
          echo "${PWD}"
          # remove data from previous run
          # Actions moves you to the root repo directory in every step, so need to cd again
          cd ../inputdata/obs_data
          echo "deleting obs data from set 2"
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
          # jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar
          # curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/albedofb_obs_data.tar --output albedofb_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/seaice_suite_obs_data.tar --output seaice_suite_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/stc_eddy_heat_fluxes_obs_data.tar --output stc_eddy_heat_fluxes_obs_data.tar
          echo "Untarring set 3 CMIP standard test files"
          tar -xvf temp_extremes_distshape_obs_data.tar
          tar -zxvf tropical_pacific_sea_level_obs_data.tar.gz
          tar -xvf mixed_layer_depth_obs_data.tar
          tar -xvf ocn_surf_flux_diag_obs_data.tar
          # tar -xvf albedofb_obs_data.tar
          tar -xvf seaice_suite_obs_data.tar
          tar -xvf stc_eddy_heat_fluxes_obs_data.tar
          # clean up tarballs
          rm -f *.tar
          rm -f *.tar.gz
      - name: Run CMIP diagnostic tests set 3
        run: |
          micromamba activate _MDTF_base
          # run the test PODs
          ./mdtf_framework.py -v -f ${{ matrix.json-file-set3 }}
      - name: Run unit tests
        run: |
          micromamba activate _MDTF_base
          python -m unittest discover