Commit

Merge remote-tracking branch 'origin/develop' into feature_2842_ugrid_config
Howard Soh committed Apr 17, 2024
2 parents 42f65bd + 2a26d59 commit 301a14b
Showing 163 changed files with 4,507 additions and 3,042 deletions.
2 changes: 1 addition & 1 deletion .github/jobs/build_docker_image.sh
@@ -15,7 +15,7 @@ time_command docker build -t ${DOCKERHUB_TAG} \
    --build-arg MET_CONFIG_OPTS \
    -f $DOCKERFILE_PATH ${GITHUB_WORKSPACE}
if [ $? != 0 ]; then
    cat ${GITHUB_WORKSPACE}/docker_build.log
    cat ${CMD_LOGFILE}
    exit 1
fi

45 changes: 45 additions & 0 deletions .github/jobs/build_sonarqube_image.sh
@@ -0,0 +1,45 @@
#! /bin/bash

source ${GITHUB_WORKSPACE}/.github/jobs/bash_functions.sh

DOCKERHUB_TAG=met-sonarqube-gha

DOCKERFILE_PATH=${GITHUB_WORKSPACE}/internal/scripts/docker/Dockerfile.sonarqube

CMD_LOGFILE=${GITHUB_WORKSPACE}/sonarqube_build.log

#
# Define $SONAR_REFERENCE_BRANCH as:
#  - The target branch of any pull requests
#  - The manually specified branch for workflow dispatch runs
#  - The source branch of any pushes (e.g. develop)
#
if [ "${GITHUB_EVENT_NAME}" == "pull_request" ]; then
    export SONAR_REFERENCE_BRANCH=${GITHUB_BASE_REF}
elif [ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]; then
    export SONAR_REFERENCE_BRANCH=${WD_REFERENCE_BRANCH}
else
    export SONAR_REFERENCE_BRANCH=${SOURCE_BRANCH}
fi

echo SONAR_REFERENCE_BRANCH=${SONAR_REFERENCE_BRANCH}

time_command docker build -t ${DOCKERHUB_TAG} \
    --build-arg MET_BASE_REPO \
    --build-arg MET_BASE_TAG \
    --build-arg SOURCE_BRANCH \
    --build-arg SONAR_SCANNER_VERSION \
    --build-arg SONAR_HOST_URL \
    --build-arg SONAR_TOKEN \
    --build-arg SONAR_REFERENCE_BRANCH \
    -f $DOCKERFILE_PATH ${GITHUB_WORKSPACE}
if [ $? != 0 ]; then
    cat ${CMD_LOGFILE}
    exit 1
fi

# Copy the .scannerwork directory from the image
id=$(docker create ${DOCKERHUB_TAG})
time_command mkdir -p /tmp/scannerwork
time_command docker cp $id:/met/.scannerwork/report-task.txt /tmp/scannerwork/report-task.txt
docker rm -v $id
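
A hedged sketch of how this script might be invoked locally, mirroring the environment that the SonarQube workflow (added below) sets before calling it; the host URL and token values are placeholders, and the working directory is assumed to be the top of a MET clone:

    # Assumed: run from the top-level MET directory with Docker available
    export GITHUB_WORKSPACE=$PWD
    export GITHUB_EVENT_NAME=workflow_dispatch
    export WD_REFERENCE_BRANCH=develop
    export MET_BASE_REPO=met-base
    export MET_BASE_TAG=v3.2
    export SOURCE_BRANCH=develop
    export SONAR_SCANNER_VERSION=5.0.1.3006
    export SONAR_HOST_URL=https://sonarqube.example.org   # placeholder
    export SONAR_TOKEN=xxxxxxxx                           # placeholder secret
    .github/jobs/build_sonarqube_image.sh
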
3 changes: 3 additions & 0 deletions .github/pull_request_template.md
@@ -22,6 +22,9 @@ If **yes**, describe the new output and/or changes to the existing output:</br>
- [ ] Will this PR result in changes to existing METplus Use Cases? **[Yes or No]**</br>
If **yes**, create a new **Update Truth** [METplus issue](https://github.com/dtcenter/METplus/issues/new/choose) to describe them.

- [ ] Do these changes introduce new SonarQube findings? **[Yes or No]**</br>
If **yes**, please describe:

- [ ] Please complete this pull request review by **[Fill in date]**.</br>

## Pull Request Checklist ##
91 changes: 91 additions & 0 deletions .github/workflows/sonarqube.yml
@@ -0,0 +1,91 @@
name: SonarQube Scan

# Run SonarQube for Pull Requests and changes to the develop and main_vX.Y branches

on:

  # Trigger analysis for pushes to develop and main_vX.Y branches
  push:
    branches:
      - develop
      - 'main_v**'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - '**/README.md'
      - '**/LICENSE.md'

  # Trigger analysis for pull requests to develop and main_vX.Y branches
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - develop
      - 'main_v**'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - '**/README.md'
      - '**/LICENSE.md'

  workflow_dispatch:
    inputs:
      reference_branch:
        description: 'Reference Branch'
        default: develop
        type: string

jobs:
  build:
    name: SonarQube Scan
    runs-on: ubuntu-latest

    steps:

      - uses: actions/checkout@v4
        with:
          # Disable shallow clones for better analysis
          fetch-depth: 0

      - name: Create output directories
        run: mkdir -p ${RUNNER_WORKSPACE}/logs

      - name: Get branch name
        id: get_branch_name
        run: echo branch_name=${GITHUB_REF#refs/heads/} >> $GITHUB_OUTPUT

      - name: SonarQube Scan in Docker
        run: .github/jobs/build_sonarqube_image.sh
        env:
          MET_BASE_REPO: met-base
          MET_BASE_TAG: v3.2
          SOURCE_BRANCH: ${{ steps.get_branch_name.outputs.branch_name }}
          WD_REFERENCE_BRANCH: ${{ github.event.inputs.reference_branch }}
          SONAR_SCANNER_VERSION: 5.0.1.3006
          SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

      - name: SonarQube Quality Gate check
        id: sonarqube-quality-gate-check
        uses: sonarsource/sonarqube-quality-gate-action@master
        with:
          scanMetadataReportFile: /tmp/scannerwork/report-task.txt
        timeout-minutes: 5
        env:
          SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

      - name: Copy log files into logs directory
        if: always()
        run: cp ${GITHUB_WORKSPACE}/*.log ${RUNNER_WORKSPACE}/logs/

      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_sonarqube
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore
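
A hedged example of starting the workflow_dispatch trigger from a clone of the repository, assuming the GitHub CLI is installed and authenticated; the branch value is illustrative:

    gh workflow run sonarqube.yml -f reference_branch=develop
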
3 changes: 1 addition & 2 deletions data/config/TCRMWConfig_default
@@ -98,9 +98,8 @@ regrid = {
//
n_range = 100;
n_azimuth = 180;
max_range_km = 1000.0;
delta_range_km = 10.0;
rmw_scale = 0.2;
rmw_scale = NA;

//
// Optionally convert u/v winds to tangential/radial winds
53 changes: 46 additions & 7 deletions docs/Users_Guide/reformat_point.rst
@@ -458,6 +458,8 @@ While initial versions of the ASCII2NC tool only supported a simple 11 column AS

• `International Soil Moisture Network (ISMN) Data format <https://ismn.bafg.de/en/>`_.

• `International Arctic Buoy Programme (IABP) Data format <https://iabp.apl.uw.edu/>`_.

• `AErosol RObotic NEtwork (AERONET) versions 2 and 3 format <http://aeronet.gsfc.nasa.gov/>`_

• Python embedding of point observations, as described in :numref:`pyembed-point-obs-data`. See example below in :numref:`ascii2nc-pyembed`.
@@ -522,6 +524,8 @@ Once the ASCII point observations have been formatted as expected, the ASCII fil
netcdf_file
[-format ASCII_format]
[-config file]
[-valid_beg time]
[-valid_end time]
[-mask_grid string]
[-mask_poly file]
[-mask_sid file|list]
@@ -541,21 +545,25 @@ Required Arguments for ascii2nc
Optional Arguments for ascii2nc
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

3. The **-format ASCII_format** option may be set to "met_point", "little_r", "surfrad", "wwsis", "airnowhourlyaqobs", "airnowhourly", "airnowdaily_v2", "ndbc_standard", "ismn", "aeronet", "aeronetv2", "aeronetv3", or "python". If passing in ISIS data, use the "surfrad" format flag.
3. The **-format ASCII_format** option may be set to "met_point", "little_r", "surfrad", "wwsis", "airnowhourlyaqobs", "airnowhourly", "airnowdaily_v2", "ndbc_standard", "ismn", "iabp", "aeronet", "aeronetv2", "aeronetv3", or "python". If passing in ISIS data, use the "surfrad" format flag.

4. The **-config file** option is the configuration file for generating time summaries.

5. The **-mask_grid** string option is a named grid or a gridded data file to filter the point observations spatially.
5. The **-valid_beg** time option in YYYYMMDD[_HH[MMSS]] format sets the beginning of the retention time window.

6. The **-mask_poly** file option is a polyline masking file to filter the point observations spatially.
6. The **-valid_end** time option in YYYYMMDD[_HH[MMSS]] format sets the end of the retention time window.

7. The **-mask_sid** file|list option is a station ID masking file or a comma-separated list of station ID's to filter the point observations spatially. See the description of the "sid" entry in :numref:`config_options`.
7. The **-mask_grid** string option is a named grid or a gridded data file to filter the point observations spatially.

8. The **-log file** option directs output and errors to the specified log file. All messages will be written to that file as well as standard out and error. Thus, users can save the messages without having to redirect the output on the command line. The default behavior is no log file.
8. The **-mask_poly** file option is a polyline masking file to filter the point observations spatially.

9. The **-v level** option indicates the desired level of verbosity. The value of "level" will override the default setting of 2. Setting the verbosity to 0 will make the tool run with no log messages, while increasing the verbosity above 1 will increase the amount of logging.
9. The **-mask_sid** file|list option is a station ID masking file or a comma-separated list of station ID's to filter the point observations spatially. See the description of the "sid" entry in :numref:`config_options`.

10. The **-compress level** option indicates the desired level of compression (deflate level) for NetCDF variables. The valid level is between 0 and 9. The value of "level" will override the default setting of 0 from the configuration file or the environment variable MET_NC_COMPRESS. Setting the compression level to 0 will make no compression for the NetCDF output. Lower number is for fast compression and higher number is for better compression.
10. The **-log file** option directs output and errors to the specified log file. All messages will be written to that file as well as standard out and error. Thus, users can save the messages without having to redirect the output on the command line. The default behavior is no log file.

11. The **-v level** option indicates the desired level of verbosity. The value of "level" will override the default setting of 2. Setting the verbosity to 0 will make the tool run with no log messages, while increasing the verbosity above 1 will increase the amount of logging.

12. The **-compress level** option indicates the desired level of compression (deflate level) for NetCDF variables. The valid level is between 0 and 9. The value of "level" will override the default setting of 0 from the configuration file or the environment variable MET_NC_COMPRESS. Setting the compression level to 0 will make no compression for the NetCDF output. Lower number is for fast compression and higher number is for better compression.
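
For instance, a hedged sketch of a call that exercises the newly documented "iabp" format and the valid-time window options (the file names are hypothetical) might look like:

.. code-block:: none

  ascii2nc iabp_buoys.txt iabp_buoys.nc \
     -format iabp \
     -valid_beg 20240101 \
     -valid_end 20240131_12 \
     -v 2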

An example of the ascii2nc calling sequence is shown below:

@@ -1203,3 +1211,34 @@ For how to use the script, issue the command:
.. code-block:: none

  python3 MET_BASE/python/utility/print_pointnc2ascii.py -h

IABP retrieval Python Utilities
====================================

`International Arctic Buoy Programme (IABP) Data <https://iabp.apl.uw.edu/>`_ is one of the data types supported by ascii2nc. A utility script, get_iabp_from_web.py, is included to pull all of this data from the web and store it locally. The script accesses the appropriate webpage and downloads the ASCII files for all buoys. It is straightforward to run but can be time intensive, as the archive of this data is extensive and the files are downloaded one at a time.

The script can be found at:

.. code-block:: none

  MET_BASE/python/utility/get_iabp_from_web.py

For how to use the script, issue the command:

.. code-block:: none

  python3 MET_BASE/python/utility/get_iabp_from_web.py -h
A second IABP utility script, find_iabp_in_timerange.py, is included for users to run after all files have been downloaded with get_iabp_from_web.py. It examines the downloaded files and lists those containing entries that fall within a user-specified range of days.

The script can be found at:

.. code-block:: none

  MET_BASE/python/utility/find_iabp_in_timerange.py

For how to use the script, issue the command:

.. code-block:: none

  python3 MET_BASE/python/utility/find_iabp_in_timerange.py -h
14 changes: 3 additions & 11 deletions docs/Users_Guide/tc-rmw.rst
@@ -101,27 +101,19 @@ The **n_azimuth** parameter is the number of equally spaced azimuth intervals in

_______________________

.. code-block:: none

  max_range_km = 100.0;

The **max_range_km** parameter specifies the maximum range of the range-azimuth grid, in kilometers. If this parameter is specified and not **rmw_scale**, the radial grid spacing will be **max_range_km / n_range**.

_______________________

.. code-block:: none

  delta_range_km = 10.0;

The **delta_range_km** parameter specifies the spacing of the range rings, in kilometers.
The **delta_range_km** parameter specifies the spacing of the range rings, in kilometers. The range values start with 0 km and extend out to **n_range - 1** times this delta spacing.

_______________________

.. code-block:: none

  rmw_scale = 0.2;
  rmw_scale = NA;

The **rmw_scale** parameter overrides the **max_range_km** parameter. When this is set the radial grid spacing will be **rmw_scale** in units of the RMW, which varies along the storm track.
If changed from its default value of **NA**, the **rmw_scale** parameter overrides the **delta_range_km** parameter. The radial grid spacing is defined using **rmw_scale** in units of the RMW, which varies along the storm track. For example, setting **rmw_scale** to 0.2 would define the delta range spacing as 20% of the radius of maximum winds around each point. Note that RMW is defined in nautical miles but is converted to kilometers for this computation.
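
Continuing that example with an assumed RMW of 30 nautical miles at a given track point (an illustrative value, not taken from the configuration file), the spacing at that point works out to:

.. code-block:: none

  delta_range = rmw_scale * RMW
              = 0.2 * (30 nm * 1.852 km/nm)
              = 11.1 km (approximately)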

_______________________

94 changes: 94 additions & 0 deletions internal/scripts/docker/Dockerfile.sonarqube
@@ -0,0 +1,94 @@
ARG MET_BASE_REPO=met-base
ARG MET_BASE_TAG=v3.2

FROM dtcenter/${MET_BASE_REPO}:${MET_BASE_TAG}
MAINTAINER John Halley Gotway <[email protected]>

#
# This Dockerfile checks out MET from GitHub and runs the
# SonarQube static code analysis on the specified branch or tag.
# https://docs.sonarqube.org/latest/analysis/scan/sonarscanner/
#
ARG SONAR_SCANNER_VERSION=5.0.1.3006
ARG SONAR_HOST_URL
ARG SONAR_TOKEN
ARG SOURCE_BRANCH
ARG SONAR_REFERENCE_BRANCH

#
# SONAR_HOST_URL is required.
#
RUN if [ "x${SONAR_HOST_URL}" = "x" ]; then \
echo "ERROR: SONAR_HOST_URL undefined! Rebuild with \"--build-arg SONAR_HOST_URL={url}\""; \
exit 1; \
fi

#
# SONAR_TOKEN is required.
#
RUN if [ "x${SONAR_TOKEN}" = "x" ]; then \
echo "ERROR: SONAR_TOKEN undefined! Rebuild with \"--build-arg SONAR_TOKEN={token}\""; \
exit 1; \
fi

#
# SOURCE_BRANCH is the branch name of the MET source code.
#
RUN if [ "x${SOURCE_BRANCH}" = "x" ]; then \
echo "ERROR: SOURCE_BRANCH undefined! Rebuild with \"--build-arg SOURCE_BRANCH={branch name}\""; \
exit 1; \
else \
echo "Build Argument SOURCE_BRANCH=${SOURCE_BRANCH}"; \
fi

#
# SONAR_REFERENCE_BRANCH defines the branch against which this scan is compared.
#
RUN if [ "x${SONAR_REFERENCE_BRANCH}" = "x" ]; then \
      echo "ERROR: SONAR_REFERENCE_BRANCH undefined! Rebuild with \"--build-arg SONAR_REFERENCE_BRANCH={branch name}\""; \
      exit 1; \
    else \
      echo "Build Argument SONAR_REFERENCE_BRANCH=${SONAR_REFERENCE_BRANCH}"; \
    fi

ENV MET_GIT_NAME ${SOURCE_BRANCH}
ENV MET_REPO_DIR /met/MET-${MET_GIT_NAME}
ENV MET_GIT_URL https://github.com/dtcenter/MET

#
# Download and install the Sonar software.
#
RUN echo "Installing SonarQube into $HOME/.sonar" \
&& mkdir -p $HOME/.sonar \
&& curl -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${SONAR_SCANNER_VERSION}-linux.zip \
&& unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/ \
&& echo export PATH="$HOME/.sonar/sonar-scanner-${SONAR_SCANNER_VERSION}-linux/bin:\$PATH" >> $HOME/.bashrc \
&& curl -sSLo $HOME/.sonar/build-wrapper-linux-x86.zip ${SONAR_HOST_URL}/static/cpp/build-wrapper-linux-x86.zip \
&& unzip -o $HOME/.sonar/build-wrapper-linux-x86.zip -d $HOME/.sonar/ \
&& echo export PATH="$HOME/.sonar/build-wrapper-linux-x86:\$PATH" >> $HOME/.bashrc

#
# Update the OS, as needed.
#
RUN apt update

#
# Set the working directory.
#
WORKDIR /met

#
# Copy the MET source code into the image and run the SonarQube build script.
#
RUN echo "Copying MET into ${MET_REPO_DIR}" \
&& mkdir -p ${MET_REPO_DIR}

COPY . ${MET_REPO_DIR}

RUN if [ ! -e "${MET_REPO_DIR}/configure.ac" ]; then \
      echo "ERROR: docker build must be run from the MET directory: `ls`"; \
      exit 1; \
    fi

RUN cd ${MET_REPO_DIR} \
&& internal/scripts/docker/build_met_sonarqube.sh