Per #2742, fix StatAnalysis to set fcst_lev and obs_lev instead of fc… #4835
Workflow file for this run
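
# Testing workflow for the METplus repository: it obtains the METplus Docker image,
# updates input data volumes, runs the pytest unit tests, and runs the use case test matrix.
#
# The workflow can also be started manually via workflow_dispatch. A hypothetical GitHub CLI
# invocation, assuming this file lives at .github/workflows/testing.yml (the file name is not
# shown on this page); the example image tag is taken from the force_met_image description:
#
#   gh workflow run testing.yml --ref develop -f force_met_image=met-dev:feature_XYZ_name-PR
#
# The other dispatch inputs (repository, sha, ref, actor, pusher_email) are intended to be
# filled in by an external repository trigger rather than typed by hand.
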
name: Testing

on:
  push:
    branches:
      - develop
      - develop-ref
      - 'feature_**'
      - 'main_**'
      - 'bugfix_**'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - 'build_components/**'
      - 'manage_externals/**'
      - '**/README.md'
      - '**/LICENSE.md'
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - develop
      - 'main_*'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - 'build_components/**'
      - 'manage_externals/**'
      - '**/README.md'
      - '**/LICENSE.md'
  workflow_dispatch:
    inputs:
      force_met_image:
        description: 'MET DockerHub repo to force run to use, e.g. met:11.1.0 or met-dev:feature_XYZ_name-PR. Leave this blank to determine repo automatically.'
      repository:
        description: 'Repository that triggered workflow (used by external repo triggering)'
      sha:
        description: 'Commit hash that triggered the event (used by external repo triggering)'
      ref:
        description: 'Branch that triggered event (used by external repo triggering)'
      actor:
        description: 'User that triggered the event (used by external repo triggering)'
      pusher_email:
        description: 'Email address of user who triggered push event (used by external repo triggering)'
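
# Job graph: job_control decides which of the remaining jobs run (the head commit message is
# passed to its control script); get_image and update_data_volumes prepare the Docker image
# and input data; unit_tests runs the pytest suite; use_case_tests fans out over the matrix
# produced by job_control; merge_error_logs and create_output_data_volumes post-process the
# results.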
jobs:
  event_info:
    name: "Trigger: ${{ github.event_name != 'workflow_dispatch' && github.event_name || github.event.inputs.repository }} ${{ github.event_name != 'workflow_dispatch' && 'event' || github.event.inputs.sha }} ${{ github.event_name != 'workflow_dispatch' && 'local' || github.event.inputs.actor }} "
    runs-on: ubuntu-latest
    steps:
      - name: Print GitHub values for reference
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: echo "$GITHUB_CONTEXT"
      - name: Build URL for commit that triggered workflow
        if: github.event_name == 'workflow_dispatch'
        run: echo https://github.com/${{ github.event.inputs.repository }}/commit/${{ github.event.inputs.sha }}
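
  # set_job_controls.sh (not shown here) receives the head commit message and is expected to
  # write the control flags consumed below to $GITHUB_OUTPUT. Illustrative sketch of that
  # mechanism only; the script's actual logic and the matrix contents are assumptions:
  #
  #   echo "run_unit_tests=true" >> $GITHUB_OUTPUT
  #   echo "matrix={\"categories\":[...]}" >> $GITHUB_OUTPUT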
  job_control:
    name: Determine which jobs to run
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set job controls
        id: job_status
        run: .github/jobs/set_job_controls.sh
        env:
          commit_msg: ${{ github.event.head_commit.message }}
    outputs:
      matrix: ${{ steps.job_status.outputs.matrix }}
      run_some_tests: ${{ steps.job_status.outputs.run_some_tests }}
      run_get_image: ${{ steps.job_status.outputs.run_get_image }}
      run_get_input_data: ${{ steps.job_status.outputs.run_get_input_data }}
      run_diff: ${{ steps.job_status.outputs.run_diff }}
      run_unit_tests: ${{ steps.job_status.outputs.run_unit_tests }}
      run_save_truth_data: ${{ steps.job_status.outputs.run_save_truth_data }}
      external_trigger: ${{ steps.job_status.outputs.external_trigger }}
      branch_name: ${{ steps.job_status.outputs.branch_name }}
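
  # docker_setup.sh (not shown here) logs in to DockerHub with the credentials below and
  # obtains the METplus test image. SET_MET_IMAGE passes the manually entered force_met_image
  # input through, so a specific MET image can override the one that would otherwise be
  # chosen automatically; how the default image is selected is handled inside the script.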
  get_image:
    name: Docker Setup - Get METplus Image
    runs-on: ubuntu-latest
    needs: job_control
    if: ${{ needs.job_control.outputs.run_get_image == 'true' && needs.job_control.outputs.run_some_tests == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.10'
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Get METplus Image
        run: .github/jobs/docker_setup.sh
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          EXTERNAL_TRIGGER: ${{ needs.job_control.outputs.external_trigger }}
          SET_MET_IMAGE: ${{ github.event.inputs.force_met_image }}
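
  # Input sample data is managed by the dtcenter/metplus-action-data-update action: given the
  # parameters below it checks the metplus-data-dev / metplus-data DockerHub repositories for
  # sample_data volumes matching the branch, with data rooted at /data/input/METplus_Data.
  # The exact update logic lives in the action itself; continue-on-error keeps a data-update
  # failure from blocking the test jobs.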
  update_data_volumes:
    name: Docker Setup - Update Data Volumes
    runs-on: ubuntu-latest
    needs: job_control
    if: ${{ needs.job_control.outputs.run_get_input_data == 'true' && needs.job_control.outputs.run_some_tests == 'true' }}
    continue-on-error: true
    steps:
      - uses: dtcenter/metplus-action-data-update@v3
        with:
          docker_name: ${{ secrets.DOCKER_USERNAME }}
          docker_pass: ${{ secrets.DOCKER_PASSWORD }}
          repo_name: ${{ github.repository }}
          data_prefix: sample_data
          branch_name: ${{ needs.job_control.outputs.branch_name }}
          docker_data_dir: /data/input/METplus_Data
          data_repo_dev: metplus-data-dev
          data_repo_stable: metplus-data
          use_feature_data: true
          tag_max_pages: 15
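
  # The pytest invocation below can be reproduced locally. A hypothetical local run mirroring
  # these steps (paths relative to a METplus checkout; the output directory is arbitrary):
  #
  #   python3 -m pip install -r internal/tests/pytests/requirements.txt
  #   sudo apt install imagemagick
  #   METPLUS_TEST_OUTPUT_BASE=/tmp/pytest_output coverage run -m pytest internal/tests/pytests
  #   coverage report -m --fail-under=90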
  unit_tests:
    name: Unit Tests
    runs-on: ubuntu-latest
    needs: [job_control]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.10'
          cache: 'pip'
      - name: Install Python Test Dependencies
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install -r internal/tests/pytests/requirements.txt
      - name: Install ImageMagick convert
        run: sudo apt install imagemagick
      - name: Run Pytests
        run: coverage run -m pytest internal/tests/pytests
        env:
          METPLUS_TEST_OUTPUT_BASE: ${{ runner.workspace }}/pytest_output
      - name: Generate coverage report
        run: |
          coverage report -m --fail-under=90 || echo "::error file=coverage,line=1,col=1::Code coverage is below 90%"
        if: always()
        continue-on-error: true
      - name: Run Coveralls
        uses: AndreMiras/coveralls-python-action@8799c9f4443ac4201d2e2f2c725d577174683b99
        if: always()
        continue-on-error: true
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
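
  # Each matrix entry supplies a "categories" value that is passed to the run_tests composite
  # action and the helper scripts. The matrix itself is the JSON emitted by job_control,
  # along the lines of the following (illustrative only; the real category names and ranges
  # come from set_job_controls.sh):
  #
  #   {"categories": ["met_tool_wrapper:0-30", "data_assimilation:0-5"]}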
  use_case_tests:
    name: Use Case Tests
    runs-on: ubuntu-latest
    needs: [get_image, update_data_volumes, job_control, unit_tests]
    if: ${{ needs.job_control.outputs.run_some_tests == 'true' }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.job_control.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Create directories for database
        run: .github/jobs/create_dirs_for_database.sh
      - name: Create directory for artifacts
        run: mkdir -p artifact
      - name: Get artifact name
        id: get-artifact-name
        run: |
          artifact_name=`.github/jobs/get_artifact_name.sh ${{ matrix.categories }}`
          echo "artifact_name=${artifact_name}" >> $GITHUB_OUTPUT
      # run use case tests
      - name: Run Use Cases
        uses: ./.github/actions/run_tests
        id: run_tests
        with:
          categories: ${{ matrix.categories }}
      # copy logs with errors to error_logs directory to save as artifact
      - name: Save error logs
        id: save-errors
        if: ${{ always() && steps.run_tests.conclusion == 'failure' }}
        run: .github/jobs/save_error_logs.sh
      # run difference testing
      - name: Run difference tests
        id: run-diff
        if: ${{ needs.job_control.outputs.run_diff == 'true' && steps.run_tests.conclusion == 'success' }}
        run: .github/jobs/run_difference_tests.sh ${{ matrix.categories }} ${{ steps.get-artifact-name.outputs.artifact_name }}
      # copy output data to save as artifact
      - name: Save output data
        id: save-output
        if: ${{ always() && steps.run_tests.conclusion != 'skipped' }}
        run: .github/jobs/copy_output_to_artifact.sh ${{ steps.get-artifact-name.outputs.artifact_name }}
      - name: Upload output data artifact
        uses: actions/upload-artifact@v4
        if: ${{ always() && steps.run_tests.conclusion != 'skipped' }}
        with:
          name: ${{ steps.get-artifact-name.outputs.artifact_name }}
          path: artifact/${{ steps.get-artifact-name.outputs.artifact_name }}
      - name: Upload error logs artifact
        uses: actions/upload-artifact@v4
        if: ${{ always() && steps.save-errors.outputs.upload_error_logs }}
        with:
          name: error_logs-${{ steps.get-artifact-name.outputs.artifact_name }}
          path: artifact/error_logs
          if-no-files-found: ignore
      - name: Upload difference data artifact
        uses: actions/upload-artifact@v4
        if: ${{ always() && steps.run-diff.outputs.upload_diff == 'true' }}
        with:
          name: diff-${{ steps.get-artifact-name.outputs.artifact_name }}
          path: artifact/diff-${{ steps.get-artifact-name.outputs.artifact_name }}
          if-no-files-found: ignore
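
  # If any use case job failed, this job queries the GitHub API for the steps of the current
  # run, checks whether any "Save error logs" step succeeded (i.e. error logs were actually
  # collected), and if so merges the per-category error_logs-* artifacts into a single
  # error_logs artifact.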
  merge_error_logs:
    name: Merge Error Logs
    runs-on: ubuntu-latest
    needs: use_case_tests
    if: ${{ always() && needs.use_case_tests.result == 'failure' }}
    steps:
      - name: Check for error logs
        id: check-for-error-logs
        run: |
          json_data=$(curl -s -H "Authorization: Bearer ${{ github.token }}" \
            "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs")
          error_log_jobs=($(echo "$json_data" | jq -r '.jobs[] | select(.name | startswith("Use Case Tests")) | .steps[] | select(.name | startswith("Save error logs")) | select(.conclusion | startswith("success"))'))
          # save output variable to merge error logs if any error logs were created
          if [ ! -z "${error_log_jobs}" ]; then
            echo "has_error_logs=true" >> $GITHUB_OUTPUT
          else
            echo "has_error_logs=false" >> $GITHUB_OUTPUT
          fi
      - name: Merge Artifacts
        if: ${{ always() && steps.check-for-error-logs.outputs.has_error_logs == 'true' }}
        uses: actions/upload-artifact/merge@v4
        with:
          name: error_logs
          pattern: error_logs-*
          delete-merged: true
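
  # When job_control enables run_save_truth_data, this job downloads the output artifacts from
  # the use case tests and hands them to create_output_data_volumes.sh (not shown here), which
  # presumably publishes them to DockerHub as the new "truth" data volumes used by later
  # difference tests.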
  create_output_data_volumes:
    name: Create Output Docker Data Volumes
    runs-on: ubuntu-latest
    # job_control must be a direct dependency so its run_save_truth_data output is available
    # in the if expression below
    needs: [use_case_tests, job_control]
    if: ${{ needs.job_control.outputs.run_save_truth_data == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
      - run: .github/jobs/create_output_data_volumes.sh
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}