diff --git a/.github/workflows/build_docker_and_trigger_metplus.yml b/.github/workflows/build_docker_and_trigger_metplus.yml index 7d7dcce29c..7d1ab738d8 100644 --- a/.github/workflows/build_docker_and_trigger_metplus.yml +++ b/.github/workflows/build_docker_and_trigger_metplus.yml @@ -5,7 +5,7 @@ on: branches: - develop paths-ignore: - - 'met/docs/**' + - 'docs/**' workflow_dispatch: diff --git a/data/config/GenEnsProdConfig_default b/data/config/GenEnsProdConfig_default index c650ec8b24..16a36f9833 100644 --- a/data/config/GenEnsProdConfig_default +++ b/data/config/GenEnsProdConfig_default @@ -13,7 +13,6 @@ model = "FCST"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/docs/Users_Guide/appendixA.rst b/docs/Users_Guide/appendixA.rst index f39c96913a..384422af1f 100644 --- a/docs/Users_Guide/appendixA.rst +++ b/docs/Users_Guide/appendixA.rst @@ -515,7 +515,7 @@ Q. What is an example of using Grid-Stat with regridding and masking turned on? This tells Grid-Stat to do verification on the "observation" grid. Grid-Stat reads the GFS and Stage4 data and then automatically regrids the GFS data to the Stage4 domain using budget interpolation. - Use "FCST" to verify the forecast domain. And use either a named + Use FCST to verify the forecast domain, and use either a named grid or a grid specification string to regrid both the forecast and observation to a common grid. For example, to_grid = "G212"; will regrid both to NCEP Grid 212 before comparing them. diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst index dc81f0cd96..bc051f5a14 100644 --- a/docs/Users_Guide/appendixF.rst +++ b/docs/Users_Guide/appendixF.rst @@ -368,7 +368,7 @@ The Ensemble-Stat, Series-Analysis, MTD and Gen-Ens-Prod tools all have the abil gen_ens_prod ens1.nc ens2.nc ens3.nc ens4.nc -out ens_prod.nc -config GenEnsProd_config -In this case, a user is passing 4 ensemble members to Gen-Ens-Prod to be evaluated, and each member is in a separate file. If a user wishes to use Python embedding to process the ensemble input files, then the same exact command is used however special modifications inside the GenEnsProd_config file are needed. In the config file dictionary, the user must set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate the Python embedding for these tools. Then, in the **name** entry of the config file dictionaries for the forecast or observation data, the user must list the **full path** to the Python script to be run. However, in the Python command, replace the name of the input gridded data file to the Python script with the constant string **MET_PYTHON_INPUT_ARG**. When looping over all of the input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the input file currently being processed and optionally, any command line arguments for the Python script. Here is what this looks like in the GenEnsProd_config file for the above example: +In this case, a user is passing 4 ensemble members to Gen-Ens-Prod to be evaluated, and each member is in a separate file. If a user wishes to use Python embedding to process the ensemble input files, then the exact same command is used; however, special modifications inside the GenEnsProd_config file are needed. In the config file dictionary, the user must set the **file_type** entry to either **PYTHON_NUMPY** or **PYTHON_XARRAY** to activate the Python embedding for these tools.
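As a minimal sketch of the pattern described next (the script path and the abbreviated field entry are hypothetical, not shipped defaults), the ensemble dictionary might look like:

.. code-block:: none

   ens = {
      field = [
         {
            // hypothetical Python script run once per ensemble member file
            name      = "/path/to/my_ens_script.py MET_PYTHON_INPUT_ARG";
            level     = "(*,*)";
            file_type = PYTHON_NUMPY;
         }
      ];
   }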
Then, in the **name** entry of the config file dictionaries for the forecast or observation data, the user must list the **full path** to the Python script to be run. However, in the Python command, replace the name of the input gridded data file to the Python script with the constant string **MET_PYTHON_INPUT_ARG**. When looping over all of the input files, the MET tools will replace that constant **MET_PYTHON_INPUT_ARG** with the path to the input file currently being processed and optionally, any command line arguments for the Python script. Here is what this looks like in the GenEnsProd_config file for the above example: .. code-block:: :caption: Gen-Ens-Prod MET_PYTHON_INPUT_ARG Config diff --git a/docs/Users_Guide/config_options.rst b/docs/Users_Guide/config_options.rst index 3d892808e0..49885c3a9d 100644 --- a/docs/Users_Guide/config_options.rst +++ b/docs/Users_Guide/config_options.rst @@ -87,26 +87,26 @@ The configuration file language supports the following data types: * The following percentile threshold types are supported: - * "SFP" for a percentile of the sample forecast values. + * SFP for a percentile of the sample forecast values. e.g. ">SFP33.3" means greater than the 33.3-rd forecast percentile. - * "SOP" for a percentile of the sample observation values. + * SOP for a percentile of the sample observation values. e.g. ">SOP75" means greater than the 75-th observation percentile. - * "SFCP" for a percentile of the sample forecast climatology values. + * SFCP for a percentile of the sample forecast climatology values. e.g. ">SFCP90" means greater than the 90-th forecast climatology percentile. - * "SOCP" for a percentile of the sample observation climatology values. + * SOCP for a percentile of the sample observation climatology values. e.g. ">SOCP90" means greater than the 90-th observation climatology percentile. For backward compatibility, the "SCP" threshold type is processed the same as "SOCP". - * "USP" for a user-specified percentile threshold. + * USP for a user-specified percentile threshold. e.g. " 0.0. - * "FCDP" for forecast climatological distribution percentile thresholds. + * FCDP for forecast climatological distribution percentile thresholds. These thresholds require that the forecast climatological mean and standard deviation be defined using the "climo_mean" and "climo_stdev" config file options, respectively. The categorical (cat_thresh), @@ -125,7 +125,7 @@ The configuration file language supports the following data types: e.g. ">FCDP50" means greater than the 50-th percentile of the climatological distribution for each point. - * "OCDP" for observation climatological distribution percentile thresholds. + * OCDP for observation climatological distribution percentile thresholds. The "OCDP" threshold logic matches the "FCDP" logic described above. However these thresholds are defined using the observation climatological mean and standard deviation rather than the forecast climatological data. @@ -138,7 +138,7 @@ The configuration file language supports the following data types: in ensemble_stat), the following special logic is applied. Percentile thresholds of type equality are automatically converted to percentile bins which span the values from 0 to 100. 
- For example, "==OCDP25" is automatically expanded to 4 percentile bins: + For example, ==OCDP25 is automatically expanded to 4 percentile bins: >=OCDP0&&=OCDP25&&=OCDP50&&=OCDP75&&<=OCDP100 * When sample percentile thresholds of type SFP, SOP, SFCP, SOCP, or FBIAS @@ -160,13 +160,13 @@ The configuration file language supports the following data types: Prior to MET version 12.0.0, forecast climatological inputs were not supported. The observation climatological inputs were used to process - threshold types named "SCP" and "CDP". + threshold types named SCP and CDP. - For backward compatibility, the "SCP" threshold type is processed the same - as "SOCP" and "CDP" the same as "OCDP". + For backward compatibility, the SCP threshold type is processed the same + as SOCP and CDP the same as OCDP. - Users are encouraged to replace the deprecated "SCP" and "CDP" threshold - types with the updated "SOCP" and "OCDP" types, respectively. + Users are encouraged to replace the deprecated SCP and CDP threshold + types with the updated SOCP and OCDP types, respectively. * Piecewise-Linear Function (currently used only by MODE): @@ -351,14 +351,14 @@ values and/or define observation bias corrections. When processing point and gridded observations, Ensemble-Stat searches the table to find the entry defining the observation error information. The table consists of 15 columns and includes a header row defining each column. The -special string "ALL" is interpreted as a wildcard in these files. The first 6 +special string ALL is interpreted as a wildcard in these files. The first 6 columns (OBS_VAR, MESSAGE_TYPE, PB_REPORT_TYPE, IN_REPORT_TYPE, INSTRUMENT_TYPE, and STATION_ID) may be set to a comma-separated list of strings to be matched. In addition, the strings in the OBS_VAR column are interpreted as regular expressions when searching for a match. For example, setting the OBS_VAR column to 'APCP_[0-9]+' would match observations for both APCP_03 and APCP_24. The -HGT_RANGE, VAL_RANGE, and PRS_RANGE columns should either be set to "ALL" or -"BEG,END" where BEG and END specify the range of values to be used. The +HGT_RANGE, VAL_RANGE, and PRS_RANGE columns should either be set to ALL or +BEG,END where BEG and END specify the range of values to be used. The INST_BIAS_SCALE and INST_BIAS_OFFSET columns define instrument bias adjustments which are applied to the observation values. The DIST_TYPE and DIST_PARM columns define the distribution from which random perturbations should be drawn @@ -366,7 +366,7 @@ and applied to the ensemble member values. See the obs_error description below for details on the supported error distributions. The last two columns, MIN and MAX, define the bounds for the valid range of the bias-corrected observation values and randomly perturbed ensemble member values. Values less than MIN are -reset to the mimimum value and values greater than MAX are reset to the maximum +reset to the minimum value and values greater than MAX are reset to the maximum value. A value of NA indicates that the variable is unbounded. MET_GRIB_TABLES @@ -384,7 +384,7 @@ At runtime, the MET tools read default GRIB tables from the installed *share/met/table_files* directory, and their file formats are described below: GRIB1 table files begin with "grib1" prefix and end with a ".txt" suffix. -The first line of the file must contain "GRIB1". +The first line of the file must contain GRIB1. The following lines consist of 4 integers followed by 3 strings: | Column 1: GRIB code (e.g. 
11 for temperature) @@ -404,7 +404,7 @@ References: | GRIB2 table files begin with "grib2" prefix and end with a ".txt" suffix. -The first line of the file must contain "GRIB2". +The first line of the file must contain GRIB2. The following lines consist of 8 integers followed by 3 strings. | Column 1: Section 0 Discipline @@ -824,7 +824,7 @@ using the following entries: - width = 4; To regrid using a 4x4 box or circle with diameter 4. * The "shape" entry defines the shape of the neighborhood. - Valid values are "SQUARE" or "CIRCLE" + Valid values are SQUARE or CIRCLE * The "gaussian_dx" entry specifies a delta distance for Gaussian smoothing. The default is 81.271. Ignored if not Gaussian method. @@ -1037,9 +1037,9 @@ to be verified. This dictionary may include the following entries: thresholds to specify which matched pairs should be included in the statistics. These options apply to the Point-Stat and Grid-Stat tools. They are parsed separately for each "obs.field" array entry. - The "mpr_column" strings specify MPR column names ("FCST", "OBS", - "CLIMO_MEAN", "CLIMO_STDEV", or "CLIMO_CDF"), differences of columns - ("FCST-OBS"), or the absolute value of those differences ("ABS(FCST-OBS)"). + The "mpr_column" strings specify MPR column names (FCST, OBS, + CLIMO_MEAN, CLIMO_STDEV, or CLIMO_CDF), differences of columns + (FCST-OBS), or the absolute value of those differences (ABS(FCST-OBS)). The number of "mpr_thresh" thresholds must match the number of "mpr_column" entries, and the n-th threshold is applied to the n-th column. Any matched pairs which do not meet any of the specified thresholds are excluded from @@ -1170,64 +1170,64 @@ File-format specific settings for the "field" entry: extended PDS for ensembles. Set to "hi_res_ctl", "low_res_ctl", "+n", or "-n", for the n-th ensemble member. - * The "GRIB1_ptv" entry is an integer specifying the GRIB1 parameter + * The GRIB1_ptv entry is an integer specifying the GRIB1 parameter table version number. - * The "GRIB1_code" entry is an integer specifying the GRIB1 code (wgrib + * The GRIB1_code entry is an integer specifying the GRIB1 code (wgrib kpds5 value). - * The "GRIB1_center" is an integer specifying the originating center. + * The GRIB1_center is an integer specifying the originating center. - * The "GRIB1_subcenter" is an integer specifying the originating + * The GRIB1_subcenter is an integer specifying the originating subcenter. - * The "GRIB1_tri" is an integer specifying the time range indicator. + * The GRIB1_tri is an integer specifying the time range indicator. - * The "GRIB2_mtab" is an integer specifying the master table number. + * The GRIB2_mtab is an integer specifying the master table number. - * The "GRIB2_ltab" is an integer specifying the local table number. + * The GRIB2_ltab is an integer specifying the local table number. - * The "GRIB2_disc" is an integer specifying the GRIB2 discipline code. + * The GRIB2_disc is an integer specifying the GRIB2 discipline code. - * The "GRIB2_parm_cat" is an integer specifying the parameter category + * The GRIB2_parm_cat is an integer specifying the parameter category code. - * The "GRIB2_parm" is an integer specifying the parameter code. + * The GRIB2_parm is an integer specifying the parameter code. - * The "GRIB2_pdt" is an integer specifying the product definition + * The GRIB2_pdt is an integer specifying the product definition template (Table 4.0).
- * The "GRIB2_process" is an integer specifying the generating process + * The GRIB2_process is an integer specifying the generating process (Table 4.3). - * The "GRIB2_cntr" is an integer specifying the originating center. + * The GRIB2_cntr is an integer specifying the originating center. - * The "GRIB2_ens_type" is an integer specifying the ensemble type + * The GRIB2_ens_type is an integer specifying the ensemble type (Table 4.6). - * The "GRIB2_der_type" is an integer specifying the derived product + * The GRIB2_der_type is an integer specifying the derived product type (Table 4.7). - * The "GRIB2_stat_type" is an integer specifying the statistical + * The GRIB2_stat_type is an integer specifying the statistical processing type (Table 4.10). - * The "GRIB2_perc_val" is an integer specifying the requested percentile + * The GRIB2_perc_val is an integer specifying the requested percentile value (0 to 100) to be used. This applies only to GRIB2 product definition templates 4.6 and 4.10. - * The "GRIB2_aerosol_type" is an integer specifying the aerosol type - (Table 4.233). This applies only to GRIB2 product defintion templates + * The GRIB2_aerosol_type is an integer specifying the aerosol type + (Table 4.233). This applies only to GRIB2 product definition templates 4.46 and 4.48. - * The "GRIB2_aerosol_interval_type" is an integer specifying the aerosol - size interval (Table 4.91). This applies only to GRIB2 product defintion + * The GRIB2_aerosol_interval_type is an integer specifying the aerosol + size interval (Table 4.91). This applies only to GRIB2 product definition templates 4.46 and 4.48. - * The "GRIB2_aerosol_size_lower" and "GRIB2_aerosol_size_upper" are doubles + * The GRIB2_aerosol_size_lower and "GRIB2_aerosol_size_upper" are doubles specifying the endpoints of the aerosol size interval. These applies only to GRIB2 product defintion templates 4.46 and 4.48. - * The "GRIB2_ipdtmpl_index" and "GRIB2_ipdtmpl_val" entries are arrays + * The GRIB2_ipdtmpl_index and GRIB2_ipdtmpl_val entries are arrays of integers which specify the product description template values to be used. The indices are 0-based. For example, use the following to request a GRIB2 record whose 9-th and 27-th product description @@ -1722,13 +1722,13 @@ mask_missing_flag The "mask_missing_flag" entry specifies how missing data should be handled in the Wavelet-Stat and MODE tools: - * "NONE" to perform no masking of missing data + * NONE to perform no masking of missing data - * "FCST" to mask the forecast field with missing observation data + * FCST to mask the forecast field with missing observation data - * "OBS" to mask the observation field with missing forecast data + * OBS to mask the observation field with missing forecast data - * "BOTH" to mask both fields with missing data from the other + * BOTH to mask both fields with missing data from the other .. code-block:: none @@ -1930,10 +1930,10 @@ should be used for computing bootstrap confidence intervals: * The "interval" entry specifies the confidence interval method: - * "BCA" for the BCa (bias-corrected percentile) interval method is + * BCA for the BCa (bias-corrected percentile) interval method is highly accurate but computationally intensive. - * "PCTILE" uses the percentile method which is somewhat less accurate + * PCTILE uses the percentile method which is somewhat less accurate but more efficient. 
* The "rep_prop" entry specifies a proportion between 0 and 1 to define @@ -1995,11 +1995,11 @@ This dictionary may include the following entries: should be applied. This does not apply when doing point verification with the Point-Stat or Ensemble-Stat tools: - * "FCST" to interpolate/smooth the forecast field. + * FCST to interpolate/smooth the forecast field. - * "OBS" to interpolate/smooth the observation field. + * OBS to interpolate/smooth the observation field. - * "BOTH" to interpolate/smooth both the forecast and the observation. + * BOTH to interpolate/smooth both the forecast and the observation. * The "vld_thresh" entry specifies a number between 0 and 1. When performing interpolation over some neighborhood of points the ratio of @@ -2186,7 +2186,7 @@ This dictionary may include the following entries: output line and used for computing probabilistic statistics. * The "shape" entry defines the shape of the neighborhood. - Valid values are "SQUARE" or "CIRCLE" + Valid values are SQUARE or CIRCLE * The "prob_cat_thresh" entry defines the thresholds which define ensemble probabilities from which to compute the ranked probability score output. @@ -2212,11 +2212,11 @@ The "output_flag" entry is a dictionary that specifies what verification methods should be applied to the input data. Options exist for each output line type from the MET tools. Each line type may be set to one of: -* "NONE" to skip the corresponding verification method +* NONE to skip the corresponding verification method -* "STAT" to write the verification output only to the ".stat" output file +* STAT to write the verification output only to the ".stat" output file -* "BOTH" to write to the ".stat" output file as well the optional +* BOTH to write to the ".stat" output file as well the optional "_type.txt" file, a more readable ASCII file sorted by line type. .. code-block:: none @@ -2353,12 +2353,12 @@ Lat/Lon grids. It is only applied for grid-to-grid verification in Grid-Stat and Ensemble-Stat and is not applied for grid-to-point verification. Three grid weighting options are currently supported: -* "NONE" to disable grid weighting using a constant weight (default). +* NONE to disable grid weighting using a constant weight (default). -* "COS_LAT" to define the weight as the cosine of the grid point latitude. +* COS_LAT to define the weight as the cosine of the grid point latitude. This an approximation for grid box area used by NCEP and WMO. -* "AREA" to define the weight as the true area of the grid box (km^2). +* AREA to define the weight as the true area of the grid box (km^2). The weights are ultimately computed as the weight at each grid point divided by the sum of the weights for the current masking region. @@ -2403,9 +2403,9 @@ duplicate_flag The "duplicate_flag" entry specifies how to handle duplicate point observations in Point-Stat and Ensemble-Stat: -* "NONE" to use all point observations (legacy behavior) +* NONE to use all point observations (legacy behavior) -* "UNIQUE" only use a single observation if two or more observations +* UNIQUE only use a single observation if two or more observations match. Matching observations are determined if they contain identical latitude, longitude, level, elevation, and time information. They may contain different observation values or station IDs @@ -2427,23 +2427,23 @@ observations that appear at a single location (lat,lon,level,elev) in Point-Stat and Ensemble-Stat. 
Eight techniques are currently supported: -* "NONE" to use all point observations (legacy behavior) +* NONE to use all point observations (legacy behavior) -* "NEAREST" use only the observation that has the valid +* NEAREST use only the observation that has the valid time closest to the forecast valid time -* "MIN" use only the observation that has the lowest value +* MIN use only the observation that has the lowest value -* "MAX" use only the observation that has the highest value +* MAX use only the observation that has the highest value -* "UW_MEAN" compute an unweighted mean of the observations +* UW_MEAN compute an unweighted mean of the observations -* "DW_MEAN" compute a weighted mean of the observations based +* DW_MEAN compute a weighted mean of the observations based on the time of the observation -* "MEDIAN" use the median observation +* MEDIAN use the median observation -* "PERC" use the Nth percentile observation where N = obs_perc_value +* PERC use the Nth percentile observation where N = obs_perc_value The reporting mechanism for this feature can be activated by specifying a verbosity level of three or higher. The report will show information @@ -3204,7 +3204,7 @@ Floating-point max/min options: Setting limits on various floating-point attributes. One may specify these as integers (i.e., without a decimal point), if desired. The following pairs of options indicate minimum and maximum values for each MODE attribute that can be described as a floating- -point number. Please refer to "The MODE Tool" section on attributes in the +point number. Please refer to :ref:`mode-attributes` in the MET User's Guide for a description of these attributes. .. code-block:: none @@ -3371,14 +3371,14 @@ The object definition settings for MODE are contained within the "fcst" and * The "merge_flag" entry specifies the merging methods to be applied: - * "NONE" for no merging + * NONE for no merging - * "THRESH" for the double-threshold merging method. Merge objects + * THRESH for the double-threshold merging method. Merge objects that would be part of the same object at the lower threshold. - * "ENGINE" for the fuzzy logic approach comparing the field to itself + * ENGINE for the fuzzy logic approach comparing the field to itself - * "BOTH" for both the double-threshold and engine merging methods + * BOTH for both the double-threshold and engine merging methods .. code-block:: none @@ -3417,15 +3417,15 @@ match_flag The "match_flag" entry specifies the matching method to be applied: -* "NONE" for no matching between forecast and observation objects +* NONE for no matching between forecast and observation objects -* "MERGE_BOTH" for matching allowing additional merging in both fields. +* MERGE_BOTH for matching allowing additional merging in both fields. If two objects in one field match the same object in the other field, those two objects are merged. -* "MERGE_FCST" for matching allowing only additional forecast merging +* MERGE_FCST for matching allowing only additional forecast merging -* "NO_MERGE" for matching with no additional merging in either field +* NO_MERGE for matching with no additional merging in either field .. code-block:: none @@ -3665,9 +3665,9 @@ In the PB2NC tool, the "message_type" entry is an array of message types to be retained. An empty list indicates that all should be retained. 
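For instance, to retain only upper-air soundings and surface land reports, a minimal sketch of this entry (using two values from the list below) would be:

.. code-block:: none

   // keep only ADPUPA and ADPSFC reports; an empty list keeps everything
   message_type = [ "ADPUPA", "ADPSFC" ];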
| List of valid message types: -| ADPUPA AIRCAR AIRCFT ADPSFC ERS1DA GOESND GPSIPW -| MSONET PROFLR QKSWND RASSDA SATEMP SATWND SFCBOG -| SFCSHP SPSSMI SYNDAT VADWND +| "ADPUPA", "AIRCAR", "AIRCFT", "ADPSFC", "ERS1DA", "GOESND", "GPSIPW", +| "MSONET", "PROFLR", "QKSWND", "RASSDA", "SATEMP", +| "SATWND", "SFCBOG", "SFCSHP", "SPSSMI", "SYNDAT", "VADWND" For example: @@ -3885,7 +3885,7 @@ See `Code table for observation quality markers `) and using the empirical ensemble distribution (:ref:`Hersbach, 2000 `). The CRPS statistic using the empirical ensemble distribution can be adjusted (bias corrected) by subtracting 1/(2*m) times the mean absolute difference of the ensemble members, where m is the ensemble size. This is reported as a separate statistic called CRPS_EMP_FAIR. The empirical CRPS and its fair version are included in the Ensemble Continuous Statistics (ECNT) line type, along with other statistics quantifying the ensemble spread and ensemble mean skill. -The Ensemble-Stat tool can derive ensemble relative frequencies and verify them as probability forecasts all in the same run. Note however that these simple ensemble relative frequencies are not actually calibrated probability forecasts. If probabilistic line types are requested (output_flag), this logic is applied to each pair of fields listed in the forecast (fcst) and observation (obs) dictionaries of the configuration file. Each probability category threshold (prob_cat_thresh) listed for the forecast field is applied to the input ensemble members to derive a relative frequency forecast. The probability category threshold (prob_cat_thresh) parsed from the corresponding observation entry is applied to the (gridded or point) observations to determine whether or not the event actually occurred. The paired ensemble relative freqencies and observation events are used to populate an Nx2 probabilistic contingency table. The dimension of that table is determined by the probability PCT threshold (prob_pct_thresh) configuration file option parsed from the forecast dictionary. All probabilistic output types requested are derived from the this Nx2 table and written to the ascii output files. Note that the FCST_VAR name header column is automatically reset as "PROB({FCST_VAR}{THRESH})" where {FCST_VAR} is the current field being evaluated and {THRESH} is the threshold that was applied. +The Ensemble-Stat tool can derive ensemble relative frequencies and verify them as probability forecasts all in the same run. Note, however, that these simple ensemble relative frequencies are not actually calibrated probability forecasts. If probabilistic line types are requested (output_flag), this logic is applied to each pair of fields listed in the forecast (fcst) and observation (obs) dictionaries of the configuration file. Each probability category threshold (prob_cat_thresh) listed for the forecast field is applied to the input ensemble members to derive a relative frequency forecast. The probability category threshold (prob_cat_thresh) parsed from the corresponding observation entry is applied to the (gridded or point) observations to determine whether or not the event actually occurred. The paired ensemble relative frequencies and observation events are used to populate an Nx2 probabilistic contingency table. The dimension of that table is determined by the probability PCT threshold (prob_pct_thresh) configuration file option parsed from the forecast dictionary.
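As an illustrative sketch (the event threshold is hypothetical, not a recommended setting), these two options might appear together in the forecast dictionary as:

.. code-block:: none

   fcst = {
      // ... field entries ...
      prob_cat_thresh = [ >=25.4 ];   // hypothetical event threshold
      prob_pct_thresh = [ ==0.25 ];   // probability bins of width 0.25
   }

Here ==0.25 expands to probability bins spanning 0 to 1 in steps of 0.25, so the resulting table has four probability rows.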
All probabilistic output types requested are derived from this Nx2 table and written to the ASCII output files. Note that the FCST_VAR name header column is automatically reset as "PROB({FCST_VAR}{THRESH})" where {FCST_VAR} is the current field being evaluated and {THRESH} is the threshold that was applied. Note that if no probability category thresholds (prob_cat_thresh) are defined, but climatological mean and standard deviation data is provided along with climatological bins, climatological distribution percentile thresholds are automatically derived and used to compute probabilistic outputs. diff --git a/docs/Users_Guide/masking.rst b/docs/Users_Guide/masking.rst index 0d705ac06e..5dd8fe72d8 100644 --- a/docs/Users_Guide/masking.rst +++ b/docs/Users_Guide/masking.rst @@ -178,4 +178,4 @@ In this example, the Gen-Vx-Mask tool will read the ASCII Lat/Lon file named **C Feature-Relative Methods ======================== -This section contains a description of several methods that may be used to perform feature-relative (or event -based) evaluation. The methodology pertains to examining the environment surrounding a particular feature or event such as a tropical, extra-tropical cyclone, convective cell, snow-band, etc. Several approaches are available for these types of investigations including applying masking described above (e.g. circle or box) or using the "FORCE" interpolation method in the regrid configuration option (see :numref:`config_options`). These methods generally require additional scripting, including potentially storm-track identification, outside of MET to be paired with the features of the MET tools. METplus may be used to execute this type of analysis. Please refer to the `METplus User's Guide `_. +This section contains a description of several methods that may be used to perform feature-relative (or event-based) evaluation. The methodology pertains to examining the environment surrounding a particular feature or event such as a tropical or extra-tropical cyclone, convective cell, snow-band, etc. Several approaches are available for these types of investigations including applying masking described above (e.g. circle or box) or using the FORCE interpolation method in the regrid configuration option (see :numref:`config_options`). These methods generally require additional scripting, including potentially storm-track identification, outside of MET to be paired with the features of the MET tools. METplus may be used to execute this type of analysis. Please refer to the `METplus User's Guide `_. diff --git a/docs/Users_Guide/mode.rst b/docs/Users_Guide/mode.rst index bb59cfee3e..2dc4bc3e96 100644 --- a/docs/Users_Guide/mode.rst +++ b/docs/Users_Guide/mode.rst @@ -57,6 +57,7 @@ An example of the steps involved in resolving objects is shown in :numref:`mode- Example of an application of the MODE object identification process to a model precipitation field. +.. _mode-attributes: Attributes ---------- diff --git a/docs/Users_Guide/overview.rst b/docs/Users_Guide/overview.rst index 37cf16b404..1e0d362bb4 100644 --- a/docs/Users_Guide/overview.rst +++ b/docs/Users_Guide/overview.rst @@ -62,7 +62,7 @@ The Grid-Diag tool produces multivariate probability density functions (PDFs) th The Wavelet-Stat tool decomposes two-dimensional forecasts and observations according to the Intensity-Scale verification technique described by :ref:`Casati et al. (2004) `.
There are many types of spatial verification approaches and the Intensity-Scale technique belongs to the scale-decomposition (or scale-separation) verification approaches. The spatial scale components are obtained by applying a wavelet transformation to the forecast and observation fields. The resulting scale-decomposition measures error, bias and skill of the forecast on each spatial scale. Information is provided on the scale dependency of the error and skill, on the no-skill to skill transition scale, and on the ability of the forecast to reproduce the observed scale structure. The Wavelet-Stat tool is primarily used for precipitation fields. However, the tool can be applied to other variables, such as cloud fraction. -Results from the statistical analysis stage are output in ASCII, NetCDF and Postscript formats. The Point-Stat, Grid-Stat, Wavelet-Stat, and Ensemble-Stat tools create STAT (statistics) files which are tabular ASCII files ending with a ".stat" suffix. The STAT output files consist of multiple line types, each containing a different set of related statistics. The columns preceeding the LINE_TYPE column are common to all lines. However, the number and contents of the remaining columns vary by line type. +Results from the statistical analysis stage are output in ASCII, NetCDF and Postscript formats. The Point-Stat, Grid-Stat, Wavelet-Stat, and Ensemble-Stat tools create STAT (statistics) files which are tabular ASCII files ending with a ".stat" suffix. The STAT output files consist of multiple line types, each containing a different set of related statistics. The columns preceding the LINE_TYPE column are common to all lines. However, the number and contents of the remaining columns vary by line type. The Stat-Analysis and MODE-Analysis tools aggregate the output statistics from the previous steps across multiple cases. The Stat-Analysis tool reads the STAT output of Point-Stat, Grid-Stat, Ensemble-Stat, and Wavelet-Stat and can be used to filter the STAT data and produce aggregated continuous and categorical statistics. Stat-Analysis also reads matched pair data (i.e. MPR line type) via python embedding. The MODE-Analysis tool reads the ASCII output of the MODE tool and can be used to produce summary information about object location, size, and intensity (as well as other object characteristics) across one or more cases. diff --git a/docs/Users_Guide/point-stat.rst b/docs/Users_Guide/point-stat.rst index 70e3847b79..d6de2d32b1 100644 --- a/docs/Users_Guide/point-stat.rst +++ b/docs/Users_Guide/point-stat.rst @@ -23,7 +23,7 @@ Interpolation/Matching Methods This section provides information about the various methods available in MET to match gridded model output to point observations. Matching in the vertical and horizontal are completed separately using different methods. -In the vertical, if forecasts and observations are at the same vertical level, then they are paired as-is. If any discrepancy exists between the vertical levels, then the forecasts are interpolated to the level of the observation. The vertical interpolation is done in the natural log of pressure coordinates, except for specific humidity, which is interpolated using the natural log of specific humidity in the natural log of pressure coordinates. Vertical interpolation for heights above ground are done linear in height coordinates. When forecasts are for the surface, no interpolation is done. 
They are matched to observations with message types that are mapped to **SURFACE** in the **message_type_group_map** configuration option. By default, the surface message types include ADPSFC, SFCSHP, and MSONET. The regular expression is applied to the message type list at the message_type_group_map. The derived message types from the time summary ("ADPSFC_MIN_hhmmss" and "ADPSFC_MAX_hhmmss") are accepted as "ADPSFC". +In the vertical, if forecasts and observations are at the same vertical level, then they are paired as-is. If any discrepancy exists between the vertical levels, then the forecasts are interpolated to the level of the observation. The vertical interpolation is done in the natural log of pressure coordinates, except for specific humidity, which is interpolated using the natural log of specific humidity in the natural log of pressure coordinates. Vertical interpolation for heights above ground is done linearly in height coordinates. When forecasts are for the surface, no interpolation is done. They are matched to observations with message types that are mapped to "SURFACE" in the **message_type_group_map** configuration option. By default, the surface message types include ADPSFC, SFCSHP, and MSONET. The regular expression is applied to the message type list at the message_type_group_map. The derived message types from the time summary ("ADPSFC_MIN_hhmmss" and "ADPSFC_MAX_hhmmss") are accepted as "ADPSFC". To match forecasts and observations in the horizontal plane, the user can select from a number of methods described below. Many of these methods require the user to define the width of the forecast grid W, around each observation point P, that should be considered. In addition, the user can select the interpolation shape, either a SQUARE or a CIRCLE. For example, a square of width 2 defines the 2 x 2 set of grid points enclosing P, or simply the 4 grid points closest to P. A square of width 3 defines a 3 x 3 square consisting of 9 grid points centered on the grid point closest to P. :numref:`point_stat_fig1` provides an illustration. The point P denotes the observation location where the interpolated value is calculated. The interpolation width W shown is five. diff --git a/docs/Users_Guide/reformat_point.rst b/docs/Users_Guide/reformat_point.rst index fefe71eef9..d9ad8695c1 100644 --- a/docs/Users_Guide/reformat_point.rst +++ b/docs/Users_Guide/reformat_point.rst @@ -454,7 +454,7 @@ While initial versions of the ASCII2NC tool only supported a simple 11 column AS • `AirNow DailyData_v2, AirNow HourlyData, and AirNow HourlyAQObs formats `_. See the :ref:`MET_AIRNOW_STATIONS` environment variable. -• `National Data Buoy (NDBC) Standard Meteorlogical Data format `_. See the :ref:`MET_NDBC_STATIONS` environment variable. +• `National Data Buoy Center (NDBC) Standard Meteorological Data format `_. See the :ref:`MET_NDBC_STATIONS` environment variable. • `International Soil Moisture Network (ISMN) Data format `_. diff --git a/docs/Users_Guide/stat-analysis.rst b/docs/Users_Guide/stat-analysis.rst index 92672edc26..0b87586d09 100644 --- a/docs/Users_Guide/stat-analysis.rst +++ b/docs/Users_Guide/stat-analysis.rst @@ -324,7 +324,7 @@ The configuration file for the Stat-Analysis tool is optional. Users may find it
Only output that meets all of the parameters specified in the Stat-Analysis configuration file will be retained. -The Stat-Analysis tool actually performs a two step process when reading input data. First, it stores the filtering information defined top section of the configuration file. It applies that filtering criteria when reading the input STAT data and writes the filtered data out to a temporary file, as described in :numref:`Contributor's Guide Section %s `. Second, each job defined in the **jobs** entry reads data from that temporary file and performs the task defined for the job. After all jobs have run, the Stat-Analysis tool deletes the temporary file. +The Stat-Analysis tool actually performs a two step process when reading input data. First, it stores the filtering information in the defined top section of the configuration file. It applies that filtering criteria when reading the input STAT data and writes the filtered data out to a temporary file, as described in :numref:`Contributor's Guide Section %s `. Second, each job defined in the **jobs** entry reads data from that temporary file and performs the task defined for the job. After all jobs have run, the Stat-Analysis tool deletes the temporary file. This two step process enables the Stat-Analysis tool to run more efficiently when many jobs are defined in the configuration file. If only operating on a small subset of the input data, the common filtering criteria can be applied once rather than re-applying it for each job. In general, filtering criteria common to all tasks defined in the **jobs** entry should be moved to the top section of the configuration file. diff --git a/docs/Users_Guide/tc-pairs.rst b/docs/Users_Guide/tc-pairs.rst index c7a56a05ff..cc1c7dc2cd 100644 --- a/docs/Users_Guide/tc-pairs.rst +++ b/docs/Users_Guide/tc-pairs.rst @@ -211,7 +211,7 @@ The **consensus** array allows users to derive consensus forecasts from any numb - The **members** field is a comma-separated array of model ID stings which define the members of the consensus. - The **required** field is a comma-separated array of true/false values associated with each consensus member. If a member is designated as true, that member must be present in order for the consensus to be generated. If a member is false, the consensus will be generated regardless of whether or not the member is present. The required array can either be empty or have the same length as the members array. If empty, it defaults to all false. - The **min_req** field is the number of members required in order for the consensus to be computed. The **required** and **min_req** field options are applied at each forecast lead time. If any member of the consensus has a non-valid position or intensity value, the consensus for that valid time will not be generated. -- Tropical cyclone diagnostics, if provided on the command line, are included in the computation of consensus tracks. The consensus diagnostics are computed as the mean of the diagnostics for the members. The **diag_required** and **min_diag_req** entries apply the same logic described above, but to the computation of each consensus diagnostic value rather than the consensus track location and intensity. If **diag_required** is missing or an empty list, it defaults to all false. If **min_diag_req** is missing, it default to 0. +- Tropical cyclone diagnostics, if provided on the command line, are included in the computation of consensus tracks. 
The consensus diagnostics are computed as the mean of the diagnostics for the members. The **diag_required** and **min_diag_req** entries apply the same logic described above, but to the computation of each consensus diagnostic value rather than the consensus track location and intensity. If **diag_required** is missing or an empty list, it defaults to all false. If **min_diag_req** is missing, it defaults to 0. - The **write_members** field is a boolean that indicates whether or not to write track output for the individual consensus members. If set to true, standard output will show up for all members. If set to false, output for the consensus members is excluded from the output, even if they are used to define other consensus tracks in the configuration file. Users should take care to avoid filtering out track data for the consensus members with the **model** field, described above. Either set **model** to an empty list to process all input track data or include all of the consensus members in the **model** list. Use the **write_members** field, not the **model** field, to suppress track output for consensus members. diff --git a/internal/test_unit/config/GenEnsProdConfig b/internal/test_unit/config/GenEnsProdConfig index 813272dc14..9841006614 100644 --- a/internal/test_unit/config/GenEnsProdConfig +++ b/internal/test_unit/config/GenEnsProdConfig @@ -13,7 +13,6 @@ model = "FCST"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/internal/test_unit/config/GenEnsProdConfig_climo_anom_ens_member_id b/internal/test_unit/config/GenEnsProdConfig_climo_anom_ens_member_id index adebdb2528..440b528326 100644 --- a/internal/test_unit/config/GenEnsProdConfig_climo_anom_ens_member_id +++ b/internal/test_unit/config/GenEnsProdConfig_climo_anom_ens_member_id @@ -13,7 +13,6 @@ model = "CFSv2"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/internal/test_unit/config/GenEnsProdConfig_normalize b/internal/test_unit/config/GenEnsProdConfig_normalize index b23708ab46..192c75cb5b 100644 --- a/internal/test_unit/config/GenEnsProdConfig_normalize +++ b/internal/test_unit/config/GenEnsProdConfig_normalize @@ -13,7 +13,6 @@ model = "FCST"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/internal/test_unit/config/GenEnsProdConfig_single_file_grib b/internal/test_unit/config/GenEnsProdConfig_single_file_grib index b1f2bb3315..82f31da619 100644 --- a/internal/test_unit/config/GenEnsProdConfig_single_file_grib +++ b/internal/test_unit/config/GenEnsProdConfig_single_file_grib @@ -13,7 +13,6 @@ model = "GEFS"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/internal/test_unit/config/GenEnsProdConfig_single_file_nc b/internal/test_unit/config/GenEnsProdConfig_single_file_nc index 2b4be6e12b..9d84b2bcbc 100644 --- a/internal/test_unit/config/GenEnsProdConfig_single_file_nc +++ b/internal/test_unit/config/GenEnsProdConfig_single_file_nc @@ -13,7 +13,6 @@ model = "CFSv2"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG index ab1cdd8362..8783cbd9e1 100644 --- 
a/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG +++ b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG @@ -75,16 +75,24 @@ fcst = { climo_mean = { field = field_list; file_name = [ "${FCST_CLIMO_DIR}/cmean_1d.19590410" ]; + + regrid = { + method = BILIN; + width = 2; + vld_thresh = 0.5; + shape = SQUARE; + } + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 6; }; + climo_stdev = climo_mean; climo_stdev = { - field = field_list; file_name = [ "${FCST_CLIMO_DIR}/cstdv_1d.19590410" ]; }; - time_interp_method = DW_MEAN; - day_interval = 1; - hour_interval = 6; } obs = { @@ -99,18 +107,24 @@ obs = { "${OBS_CLIMO_DIR}/u850hPa_mean.grib", "${OBS_CLIMO_DIR}/v500hPa_mean.grib", "${OBS_CLIMO_DIR}/v850hPa_mean.grib" ]; + regrid = { + method = BILIN; + width = 2; + vld_thresh = 0.5; + shape = SQUARE; + } + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 12; }; + climo_stdev = climo_mean; climo_stdev = { - field = field_list; file_name = [ "${OBS_CLIMO_DIR}/t850hPa_stdev.grib", "${OBS_CLIMO_DIR}/u850hPa_stdev.grib", "${OBS_CLIMO_DIR}/v850hPa_stdev.grib" ]; }; - - time_interp_method = DW_MEAN; - day_interval = 1; - hour_interval = 12; } //////////////////////////////////////////////////////////////////////////////// diff --git a/internal/test_unit/config/SeriesAnalysisConfig_const_climo b/internal/test_unit/config/SeriesAnalysisConfig_const_climo new file mode 100644 index 0000000000..df991f208e --- /dev/null +++ b/internal/test_unit/config/SeriesAnalysisConfig_const_climo @@ -0,0 +1,162 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// Series-Analysis configuration file. +// +// For additional information, please see the MET User's Guide. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// Output model name to be written +// +model = "GFS"; + +// +// Output description to be written +// +desc = "NA"; + +// +// Output observation type to be written +// +obtype = "GFSANL"; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification grid +// +regrid = { + to_grid = NONE; + method = NEAREST; + width = 1; + vld_thresh = 0.5; +} + +//////////////////////////////////////////////////////////////////////////////// + +censor_thresh = []; +censor_val = []; +cat_thresh = []; +cnt_thresh = [ NA ]; +cnt_logic = UNION; + +// +// Forecast and observation fields to be verified +// +fcst = { + field = [ + { name = "TMP"; level = "P850"; valid_time = "20120409_12"; }, + { name = "TMP"; level = "P850"; valid_time = "20120410_00"; }, + { name = "TMP"; level = "P850"; valid_time = "20120410_12"; } + ]; +} +obs = { + field = [ + { name = "TMP"; level = "P850"; valid_time = "20120409_12"; }, + { name = "TMP"; level = "P850"; valid_time = "20120410_00"; }, + { name = "TMP"; level = "P850"; valid_time = "20120410_12"; } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Climatology data +// +climo_mean = fcst; +climo_mean = { + + file_name = [ ${CLIMO_MEAN_FILE_LIST} ]; + + field = [ + { name = "TMP"; level = "P850"; valid_time = "19590409_00"; } + ]; + + regrid = { + method = BILIN; + width = 2; + vld_thresh = 0.5; + } + + time_interp_method = NEAREST; + day_interval = NA; + hour_interval = NA; +} + +climo_stdev = climo_mean; +climo_stdev = { + file_name = [ ${CLIMO_STDEV_FILE_LIST} ]; +} + +climo_cdf = { + cdf_bins = 1; + center_bins = FALSE; + direct_prob = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Confidence interval settings +// +ci_alpha = [ 0.05 ]; + +boot = { + interval = PCTILE; + rep_prop = 1.0; + n_rep = 0; + rng = "mt19937"; + seed = "1"; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification masking regions +// +mask = { + grid = ""; + poly = ""; +} + +// +// Number of grid points to be processed concurrently. Set smaller to use less +// memory but increase the number of passes through the data. If set <= 0, all +// grid points are processed concurrently. 
+// +block_size = 0; + +// +// Ratio of valid matched pairs to compute statistics for a grid point +// +vld_thresh = 0.5; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Statistical output types +// +output_stats = { + fho = [ ]; + ctc = [ ]; + cts = [ ]; + mctc = [ ]; + mcts = [ ]; + cnt = [ "TOTAL", "RMSE", "ANOM_CORR" ]; + sl1l2 = [ ]; + sal1l2 = [ ]; + pct = [ ]; + pstd = [ ]; + pjc = [ ]; + prc = [ ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +hss_ec_value = NA; +rank_corr_flag = FALSE; +tmp_dir = "/tmp"; +version = "V12.0.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/internal/test_unit/xml/unit_climatology_1.0deg.xml b/internal/test_unit/xml/unit_climatology_1.0deg.xml index fcd6b59668..699026825e 100644 --- a/internal/test_unit/xml/unit_climatology_1.0deg.xml +++ b/internal/test_unit/xml/unit_climatology_1.0deg.xml @@ -186,6 +186,34 @@ + + &MET_BIN;/series_analysis + + CLIMO_MEAN_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590409" + + + CLIMO_STDEV_FILE_LIST + "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590409" + + + + \ + -fcst &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F024.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F036.grib2 \ + -obs &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1200_000.grb2 \ + &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \ + &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_1200_000.grb2 \ + -out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_GFS_CLIMO_1.0DEG_CONST_CLIMO.nc \ + -config &CONFIG_DIR;/SeriesAnalysisConfig_const_climo \ + -v 3 + + + &OUTPUT_DIR;/climatology_1.0deg/series_analysis_GFS_CLIMO_1.0DEG_CONST_CLIMO.nc + + + &MET_BIN;/series_analysis diff --git a/scripts/config/GenEnsProdConfig b/scripts/config/GenEnsProdConfig index 74350a328d..65d13aadbd 100644 --- a/scripts/config/GenEnsProdConfig +++ b/scripts/config/GenEnsProdConfig @@ -13,7 +13,6 @@ model = "FCST"; // // Output description to be written -// May be set separately in each "obs.field" entry // desc = "NA"; diff --git a/src/basic/vx_cal/is_leap_year.cc b/src/basic/vx_cal/is_leap_year.cc index d37854d690..a383041475 100644 --- a/src/basic/vx_cal/is_leap_year.cc +++ b/src/basic/vx_cal/is_leap_year.cc @@ -102,7 +102,7 @@ void adjuste_day_for_month_year_units(int &day, int &month, int &year, double mo // Compute remaining days from the month fraction bool day_adjusted = false; const int day_offset = (int)(month_fraction * DAYS_PER_MONTH + 0.5); - const char *method_name = "adjuste_day() --> "; + const char *method_name = "adjuste_day_for_month_year_units() -> "; day += day_offset; if (day == 1 && abs(month_fraction-0.5) < DAY_EPSILON) { @@ -162,7 +162,7 @@ unixtime add_to_unixtime(unixtime base_unixtime, int sec_per_unit, unixtime ut; auto time_value_ut = (unixtime)time_value; double time_fraction = time_value - (double)time_value_ut; - const char *method_name = "add_to_unixtime() -->"; + const char *method_name = "add_to_unixtime() -> "; if (sec_per_unit == SEC_MONTH || sec_per_unit == SEC_YEAR) { if (time_value < 0) { diff --git a/src/basic/vx_config/config_constants.h b/src/basic/vx_config/config_constants.h index e1a18aeb1a..7bba9e759e 100644 --- a/src/basic/vx_config/config_constants.h +++ b/src/basic/vx_config/config_constants.h @@ -297,7 +297,7 @@ struct InterpInfo { void clear(); void validate(); // Ensure that width and method 
are accordant bool operator==(const InterpInfo &) const; - InterpInfo &operator=(const InterpInfo &a) noexcept; // SoanrQube findings + InterpInfo &operator=(const InterpInfo &a) noexcept; // SonarQube findings }; //////////////////////////////////////////////////////////////////////// @@ -329,6 +329,7 @@ struct RegridInfo { void validate(); // ensure that width and method are accordant void validate_point(); // ensure that width and method are accordant RegridInfo &operator=(const RegridInfo &a) noexcept; // SonarQube findings + ConcatString get_str() const; }; //////////////////////////////////////////////////////////////////////// @@ -725,12 +726,10 @@ static const char conf_key_is_prob[] = "is_prob"; // // Climatology data parameter key names // -static const char conf_key_climo_mean_field[] = "climo_mean.field"; -static const char conf_key_fcst_climo_mean_field[] = "fcst.climo_mean.field"; -static const char conf_key_obs_climo_mean_field[] = "obs.climo_mean.field"; -static const char conf_key_climo_stdev_field[] = "climo_stdev.field"; -static const char conf_key_fcst_climo_stdev_field[] = "fcst.climo_stdev.field"; -static const char conf_key_obs_climo_stdev_field[] = "obs.climo_stdev.field"; +static const char conf_key_climo_mean[] = "climo_mean"; +static const char conf_key_climo_mean_field[] = "climo_mean.field"; +static const char conf_key_climo_stdev[] = "climo_stdev"; +static const char conf_key_climo_stdev_field[] = "climo_stdev.field"; // // Climatology distribution parameter key names diff --git a/src/basic/vx_config/config_util.cc b/src/basic/vx_config/config_util.cc index 344f997bea..5cce67dfcb 100644 --- a/src/basic/vx_config/config_util.cc +++ b/src/basic/vx_config/config_util.cc @@ -14,6 +14,7 @@ #include "config_util.h" #include "enum_as_int.hpp" +#include "configobjecttype_to_string.h" #include "vx_math.h" #include "vx_util.h" @@ -265,6 +266,13 @@ RegridInfo &RegridInfo::operator=(const RegridInfo &a) noexcept { return *this; } +/////////////////////////////////////////////////////////////////////////////// + +ConcatString RegridInfo::get_str() const { + ConcatString cs(interpmthd_to_string(method)); + cs << "(" << width << ")"; + return cs; +} /////////////////////////////////////////////////////////////////////////////// @@ -1331,13 +1339,10 @@ BootInfo parse_conf_boot(Dictionary *dict) { return info; } - /////////////////////////////////////////////////////////////////////////////// -RegridInfo parse_conf_regrid(Dictionary *dict, bool error_out) { - Dictionary *regrid_dict = (Dictionary *) nullptr; +RegridInfo parse_conf_regrid(Dictionary *dict, RegridInfo *default_info, bool error_out) { RegridInfo info; - int v; if(!dict) { mlog << Error << "\nparse_conf_regrid() -> " << "empty dictionary!\n\n"; exit(1); } // Conf: regrid - regrid_dict = dict->lookup_dictionary(conf_key_regrid, false); + Dictionary *regrid_dict = dict->lookup_dictionary(conf_key_regrid, false); // Check that the regrid dictionary is present - if(!regrid_dict) { + if(!regrid_dict && !default_info) { if(error_out) { mlog << Error << "\nparse_conf_regrid() -> " << "can't find the \"regrid\" dictionary!\n\n"; exit(1); } @@ -1360,61 +1365,164 @@ RegridInfo parse_conf_regrid(Dictionary *dict, bool error_out) { } } - // Parse to_grid as an integer - v = regrid_dict->lookup_int(conf_key_to_grid, false, false); + // Conf: to_grid (optional) as an integer or string + const DictionaryEntry * entry = nullptr; + if(regrid_dict) entry =
regrid_dict->lookup(conf_key_to_grid, false); + + // to_grid found + if(entry) { - // If integer lookup successful, convert to FieldType. - if(regrid_dict->last_lookup_status()) { - info.field = int_to_fieldtype(v); - info.enable = (info.field == FieldType::Fcst || - info.field == FieldType::Obs); + // Convert integer to FieldType + if(entry->type() == IntegerType) { + info.field = int_to_fieldtype(entry->i_value()); + info.enable = (info.field == FieldType::Fcst || + info.field == FieldType::Obs); + } + // Store grid name string + else if(entry->type() == StringType) { + info.name = entry->string_value(); + info.enable = true; + } + else { + mlog << Error << "\nparse_conf_regrid() -> " + << "Unexpected type (" + << configobjecttype_to_string(entry->type()) + << ") for \"" << conf_key_to_grid + << "\" configuration entry.\n\n"; + exit(1); + } + } + // Use default RegridInfo + else if(default_info){ + info.name = default_info->name; + info.enable = default_info->enable; } - // If integer lookup unsuccessful, parse vx_grid as a string. - // Do not error out since to_grid isn't specified for climo.regrid. + // Use global default else { - info.name = regrid_dict->lookup_string(conf_key_to_grid, false); + info.name = ""; info.enable = true; } - // Conf: vld_thresh - double thr = regrid_dict->lookup_double(conf_key_vld_thresh, false); - info.vld_thresh = (is_bad_data(thr) ? default_vld_thresh : thr); + // Conf: vld_thresh (required) + if(regrid_dict && regrid_dict->lookup(conf_key_vld_thresh, false)) { + info.vld_thresh = regrid_dict->lookup_double(conf_key_vld_thresh); + } + // Use default RegridInfo + else if(default_info) { + info.vld_thresh = default_info->vld_thresh; + } + // Use global default + else { + info.vld_thresh = default_vld_thresh; + } + + // Conf: method (required) + if(regrid_dict && regrid_dict->lookup(conf_key_method, false)) { + info.method = int_to_interpmthd(regrid_dict->lookup_int(conf_key_method)); + } + // Use default RegridInfo + else if(default_info) { + info.method = default_info->method; + } - // Parse the method and width - info.method = int_to_interpmthd(regrid_dict->lookup_int(conf_key_method)); - info.width = regrid_dict->lookup_int(conf_key_width); + // Conf: width (required) + if(regrid_dict && regrid_dict->lookup(conf_key_width, false)) { + info.width = regrid_dict->lookup_int(conf_key_width); + } + // Use default RegridInfo + else if(default_info) { + info.width = default_info->width; + } - // Conf: shape - v = regrid_dict->lookup_int(conf_key_shape, false); - if (regrid_dict->last_lookup_status()) { - info.shape = int_to_gridtemplate(v); + // Conf: shape (optional) + if(regrid_dict && regrid_dict->lookup(conf_key_shape, false)) { + info.shape = int_to_gridtemplate(regrid_dict->lookup_int(conf_key_shape)); + } + // Use default RegridInfo + else if(default_info) { + info.shape = default_info->shape; } + // Use global default else { - // If not specified, use the default square shape info.shape = GridTemplateFactory::GridTemplates::Square; } - // Conf: gaussian dx and radius - double conf_value = regrid_dict->lookup_double(conf_key_gaussian_dx, false); - info.gaussian.dx = (is_bad_data(conf_value) ? default_gaussian_dx : conf_value); - conf_value = regrid_dict->lookup_double(conf_key_gaussian_radius, false); - info.gaussian.radius = (is_bad_data(conf_value) ? default_gaussian_radius : conf_value); - conf_value = regrid_dict->lookup_double(conf_key_trunc_factor, false); - info.gaussian.trunc_factor = (is_bad_data(conf_value) ? 
@@ -2514,28 +2622,28 @@ void check_mask_names(const StringArray &sa) { /////////////////////////////////////////////////////////////////////////////// -void check_climo_n_vx(Dictionary *dict, const int n_vx) { - int n; +void check_climo_n_vx(Dictionary *dict, const int n_input) { + int n_climo; // Check for a valid number of climatology mean fields - n = parse_conf_n_vx(dict->lookup_array(conf_key_climo_mean_field, false)); - if(n != 0 && n != n_vx) { + n_climo = parse_conf_n_vx(dict->lookup_array(conf_key_climo_mean_field, false)); + if(n_climo != 0 && n_climo != 1 && n_climo != n_input) { mlog << Error << "\ncheck_climo_n_vx() -> " << "The number of climatology mean fields in \"" - << conf_key_climo_mean_field - << "\" must be zero or match the number (" << n_vx - << ") in \"" << conf_key_fcst_field << "\".\n\n"; + << conf_key_climo_mean_field << "\" (" << n_climo + << ") must be 0, 1, or match the number of input fields (" + << n_input << ").\n\n"; exit(1); } // Check for a valid number of climatology standard deviation fields - n = parse_conf_n_vx(dict->lookup_array(conf_key_climo_stdev_field, false)); - if(n != 0 && n != n_vx) { + n_climo = parse_conf_n_vx(dict->lookup_array(conf_key_climo_stdev_field, false)); + if(n_climo != 0 && n_climo != 1 && n_climo != n_input) { mlog << Error << "\ncheck_climo_n_vx() -> " << "The number of climatology standard deviation fields in \"" - << conf_key_climo_stdev_field - << "\" must be zero or match the number (" - << n_vx << ") in \"" << conf_key_fcst_field << "\".\n\n"; + << conf_key_climo_stdev_field << "\" (" << n_climo + << ") must be 0, 1, or match the number of input fields (" + << n_input << ").\n\n"; exit(1); }
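The relaxed check above allows the climatology field array to contain a single entry that is reused for every input field, rather than requiring a strict zero-or-match count. For example, a configuration like the following sketch (field names and levels are illustrative) is now accepted, with the one climatology mean entry applied to both verification fields:

.. code-block:: none

   fcst = {
      field = [
         { name = "TMP"; level = "P850"; },
         { name = "TMP"; level = "P500"; }
      ];
   }

   climo_mean = {
      field = [ { name = "TMP"; level = "P850"; } ];
   }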
diff --git a/src/basic/vx_config/config_util.h b/src/basic/vx_config/config_util.h index 3dae869b2b..15de1e00f0 100644 --- a/src/basic/vx_config/config_util.h +++ b/src/basic/vx_config/config_util.h @@ -31,18 +31,34 @@ static const char conf_key_old_prepbufr_map[] = "obs_prefbufr_map"; // for ba //////////////////////////////////////////////////////////////////////// extern ConcatString parse_conf_version(Dictionary *dict); -extern ConcatString parse_conf_string(Dictionary *dict, const char *, bool check_empty = true); +extern ConcatString parse_conf_string( + Dictionary *dict, + const char *, + bool check_empty=true); extern GrdFileType parse_conf_file_type(Dictionary *dict); extern std::map - parse_conf_output_flag(Dictionary *dict, const STATLineType *, int); + parse_conf_output_flag( + Dictionary *dict, + const STATLineType *, int); extern std::map parse_conf_output_stats(Dictionary *dict); extern int parse_conf_n_vx(Dictionary *dict); -extern Dictionary parse_conf_i_vx_dict(Dictionary *dict, int index); -extern StringArray parse_conf_tc_model(Dictionary *dict, bool error_out = default_dictionary_error_out); -extern StringArray parse_conf_message_type(Dictionary *dict, bool error_out = default_dictionary_error_out); -extern StringArray parse_conf_sid_list(Dictionary *dict, const char *); -extern void parse_sid_mask(const ConcatString &, StringArray &, ConcatString &); +extern Dictionary parse_conf_i_vx_dict( + Dictionary *dict, + int index); +extern StringArray parse_conf_tc_model( + Dictionary *dict, + bool error_out=default_dictionary_error_out); +extern StringArray parse_conf_message_type( + Dictionary *dict, + bool error_out=default_dictionary_error_out); +extern StringArray parse_conf_sid_list( + Dictionary *dict, + const char *); +extern void parse_sid_mask( + const ConcatString &, + StringArray &, + ConcatString &); extern std::vector parse_conf_llpnt_mask(Dictionary *dict); extern StringArray parse_conf_obs_qty_inc(Dictionary *dict); @@ -51,27 +67,40 @@ extern NumArray parse_conf_ci_alpha(Dictionary *dict); extern NumArray parse_conf_eclv_points(Dictionary *dict); extern ClimoCDFInfo parse_conf_climo_cdf(Dictionary *dict); extern TimeSummaryInfo parse_conf_time_summary(Dictionary *dict); -extern std::map parse_conf_key_value_map( - Dictionary *dict, const char *conf_key_map_name, const char *caller=nullptr); +extern std::map + parse_conf_key_value_map( + Dictionary *dict, + const char *conf_key_map_name, + const char *caller=nullptr); extern void 
parse_add_conf_key_value_map( - Dictionary *dict, const char *conf_key_map_name, std::map *m); + Dictionary *dict, + const char *conf_key_map_name, + std::map *m); extern void parse_add_conf_key_values_map( - Dictionary *dict, const char *conf_key_map_name, - std::map *m, const char *caller=nullptr); + Dictionary *dict, + const char *conf_key_map_name, + std::map *m, + const char *caller=nullptr); extern std::map parse_conf_message_type_map(Dictionary *dict); extern std::map parse_conf_message_type_group_map(Dictionary *dict); -extern std::map parse_conf_metadata_map(Dictionary *dict); +extern std::map + parse_conf_metadata_map(Dictionary *dict); extern std::map parse_conf_obs_name_map(Dictionary *dict); extern std::map parse_conf_obs_to_qc_map(Dictionary *dict); extern std::map parse_conf_key_convert_map( - Dictionary *dict, const char *conf_key_map_name, const char *caller=nullptr); + Dictionary *dict, + const char *conf_key_map_name, + const char *caller=nullptr); extern BootInfo parse_conf_boot(Dictionary *dict); -extern RegridInfo parse_conf_regrid(Dictionary *dict, bool error_out = default_dictionary_error_out); +extern RegridInfo parse_conf_regrid( + Dictionary *dict, + RegridInfo *default_info=nullptr, + bool error_out=default_dictionary_error_out); extern InterpInfo parse_conf_interp(Dictionary *dict, const char *); extern NbrhdInfo parse_conf_nbrhd(Dictionary *dict, const char *); extern HiRAInfo parse_conf_hira(Dictionary *dict); @@ -92,7 +121,9 @@ extern ConcatString parse_conf_ugrid_coordinates_file(Dictionary *dict); extern ConcatString parse_conf_ugrid_dataset(Dictionary *dict); extern ConcatString parse_conf_ugrid_map_config(Dictionary *dict); extern double parse_conf_ugrid_max_distance_km(Dictionary *dict); -extern void parse_add_conf_ugrid_metadata_map(Dictionary *dict, std::map *m); +extern void parse_add_conf_ugrid_metadata_map( + Dictionary *dict, + std::map *m); extern void check_mask_names(const StringArray &); diff --git a/src/basic/vx_config/dictionary.h b/src/basic/vx_config/dictionary.h index 97faa4e3dc..bcb4a7f34b 100644 --- a/src/basic/vx_config/dictionary.h +++ b/src/basic/vx_config/dictionary.h @@ -243,7 +243,7 @@ class Dictionary { virtual const DictionaryEntry * operator[](int) const; - virtual const Dictionary * parent() const; + virtual Dictionary * parent() const; virtual bool is_array() const; @@ -346,7 +346,7 @@ class Dictionary { inline int Dictionary::n_entries() const { return Nentries; } -inline const Dictionary * Dictionary::parent() const { return Parent; } +inline Dictionary * Dictionary::parent() const { return Parent; } inline void Dictionary::set_is_array(bool __tf) { IsArray = __tf; return; } diff --git a/src/basic/vx_config/threshold.cc b/src/basic/vx_config/threshold.cc index cbf0a3cb7d..ef650ef2c0 100644 --- a/src/basic/vx_config/threshold.cc +++ b/src/basic/vx_config/threshold.cc @@ -114,7 +114,7 @@ if ( !match && mlog << Debug(2) << R"(Please replace the deprecated "SCP" and "CDP" )" << R"(threshold types with "SOCP" and "OCDP", respectively, in the ")" - << str << R"(" threshold string.\n)"; + << str << R"(" threshold string.)" << "\n"; print_climo_perc_thresh_log_message = false; diff --git a/src/libcode/vx_data2d/var_info.cc b/src/libcode/vx_data2d/var_info.cc index 2c92c6bf69..ac12513e76 100644 --- a/src/libcode/vx_data2d/var_info.cc +++ b/src/libcode/vx_data2d/var_info.cc @@ -116,6 +116,7 @@ void VarInfo::assign(const VarInfo &v) { nBins = v.nBins; Range = v.Range; + DefaultRegrid = v.DefaultRegrid; Regrid = v.Regrid; SetAttrName = 
v.SetAttrName; @@ -176,6 +177,7 @@ void VarInfo::clear() { nBins = 0; Range.clear(); + DefaultRegrid.clear(); Regrid.clear(); SetAttrName.clear(); @@ -215,26 +217,29 @@ void VarInfo::dump(ostream &out) const { // Dump out the contents out << "VarInfo::dump():\n" - << " MagicStr = " << MagicStr.contents() << "\n" - << " ReqName = " << ReqName.contents() << "\n" - << " Name = " << Name.contents() << "\n" - << " LongName = " << LongName.contents() << "\n" - << " Units = " << Units.contents() << "\n" - << " PFlag = " << PFlag << "\n" - << " PName = " << PName.contents() << "\n" - << " PUnits = " << PUnits.contents() << "\n" - << " PAsScalar = " << PAsScalar << "\n" - << " UVIndex = " << UVIndex << "\n" - << " Init = " << init_str << " (" << Init << ")\n" - << " Valid = " << valid_str << " (" << Valid << ")\n" - << " Ensemble = " << Ensemble.contents() << "\n" - << " Lead = " << lead_str << " (" << Lead << ")\n" - << " ConvertFx = " << (ConvertFx.is_set() ? "IsSet" : "(nul)") << "\n" - << " CensorThresh = " << CensorThresh.get_str() << "\n" - << " CensorVal = " << CensorVal.serialize() << "\n" - << " nBins = " << nBins << "\n" - << " Range = " << Range.serialize() << "\n" - << " Regrid = " << interpmthd_to_string(Regrid.method) << "\n"; + << " MagicStr = " << MagicStr.contents() << "\n" + << " ReqName = " << ReqName.contents() << "\n" + << " Name = " << Name.contents() << "\n" + << " LongName = " << LongName.contents() << "\n" + << " Units = " << Units.contents() << "\n" + << " PFlag = " << PFlag << "\n" + << " PName = " << PName.contents() << "\n" + << " PUnits = " << PUnits.contents() << "\n" + << " PAsScalar = " << PAsScalar << "\n" + << " UVIndex = " << UVIndex << "\n" + << " Init = " << init_str << " (" << Init << ")\n" + << " Valid = " << valid_str << " (" << Valid << ")\n" + << " Ensemble = " << Ensemble.contents() << "\n" + << " Lead = " << lead_str << " (" << Lead << ")\n" + << " ConvertFx = " << (ConvertFx.is_set() ? 
"IsSet" : "(nul)") << "\n" + << " CensorThresh = " << CensorThresh.get_str() << "\n" + << " CensorVal = " << CensorVal.serialize() << "\n" + << " nBins = " << nBins << "\n" + << " Range = " << Range.serialize() << "\n" + << " DefaultRegrid = " << interpmthd_to_string(DefaultRegrid.method) + << "(" << DefaultRegrid.width << ")\n" + << " Regrid = " << interpmthd_to_string(Regrid.method) + << "(" << Regrid.width << ")\n"; Level.dump(out); @@ -425,6 +430,13 @@ void VarInfo::set_range(const NumArray &a) { /////////////////////////////////////////////////////////////////////////////// +void VarInfo::set_default_regrid(const RegridInfo &ri) { + DefaultRegrid = ri; + return; +} + +/////////////////////////////////////////////////////////////////////////////// + void VarInfo::set_regrid(const RegridInfo &ri) { Regrid = ri; return; @@ -528,7 +540,7 @@ void VarInfo::set_dict(Dictionary &dict) { if(dict.last_lookup_status()) set_range(na); // Parse regrid, if present - Regrid = parse_conf_regrid(&dict, false); + Regrid = parse_conf_regrid(&dict, &DefaultRegrid, false); // Parse set_attr strings SetAttrName = diff --git a/src/libcode/vx_data2d/var_info.h b/src/libcode/vx_data2d/var_info.h index 3271376816..eba7551b67 100644 --- a/src/libcode/vx_data2d/var_info.h +++ b/src/libcode/vx_data2d/var_info.h @@ -57,7 +57,8 @@ class VarInfo int nBins; // Number of pdf bins NumArray Range; // Range of pdf bins - RegridInfo Regrid; // Regridding logic + RegridInfo DefaultRegrid; // Default regridding logic + RegridInfo Regrid; // Regridding logic // Options to override metadata ConcatString SetAttrName; @@ -189,6 +190,7 @@ class VarInfo void set_n_bins(const int &); void set_range(const NumArray &); + void set_default_regrid(const RegridInfo &); void set_regrid(const RegridInfo &); void set_level_info_grib(Dictionary & dict); diff --git a/src/libcode/vx_data2d_nc_cf/nc_cf_file.cc b/src/libcode/vx_data2d_nc_cf/nc_cf_file.cc index e985870169..41c8106f31 100644 --- a/src/libcode/vx_data2d_nc_cf/nc_cf_file.cc +++ b/src/libcode/vx_data2d_nc_cf/nc_cf_file.cc @@ -2299,7 +2299,7 @@ void NcCfFile::get_grid_mapping_polar_stereographic(const NcVar *grid_mapping_va { double x_coord_to_m_cf = 1.0; double y_coord_to_m_cf = 1.0; - static const string method_name = "NcCfFile::get_grid_mapping_polar_stereographic() --> "; + static const string method_name = "NcCfFile::get_grid_mapping_polar_stereographic() -> "; // Get projection attributes // proj_origin_lat: either 90.0 or -90.0, to decide the northern/southern hemisphere diff --git a/src/libcode/vx_regrid/vx_regrid.cc b/src/libcode/vx_regrid/vx_regrid.cc index 5fcc970601..3914c83004 100644 --- a/src/libcode/vx_regrid/vx_regrid.cc +++ b/src/libcode/vx_regrid/vx_regrid.cc @@ -40,6 +40,10 @@ switch ( info.method ) { case InterpMthd::LS_Fit: case InterpMthd::Bilin: case InterpMthd::Nearest: + case InterpMthd::Upper_Left: + case InterpMthd::Upper_Right: + case InterpMthd::Lower_Right: + case InterpMthd::Lower_Left: out = met_regrid_generic (in, from_grid, to_grid, info); break; diff --git a/src/libcode/vx_statistics/apply_mask.cc b/src/libcode/vx_statistics/apply_mask.cc index bd12b1a25b..01c696243c 100644 --- a/src/libcode/vx_statistics/apply_mask.cc +++ b/src/libcode/vx_statistics/apply_mask.cc @@ -633,7 +633,8 @@ DataPlane parse_geog_data(Dictionary *dict, const Grid &vx_grid, regrid_info = parse_conf_regrid(dict); mlog << Debug(2) << "Regridding geography mask data " << info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << 
regrid_info.get_str() << ".\n"; dp = met_regrid(dp, mtddf->grid(), vx_grid, regrid_info); } } diff --git a/src/libcode/vx_statistics/read_climo.cc b/src/libcode/vx_statistics/read_climo.cc index f5a0f2db71..8f8ddd8e9b 100644 --- a/src/libcode/vx_statistics/read_climo.cc +++ b/src/libcode/vx_statistics/read_climo.cc @@ -39,8 +39,11 @@ static DataPlane climo_hms_interp( //////////////////////////////////////////////////////////////////////// -DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, - unixtime vld_ut, const Grid &vx_grid, +DataPlane read_climo_data_plane(Dictionary *dict, + const char *entry_name, + int i_vx, + unixtime vld_ut, + const Grid &vx_grid, const char *desc) { DataPlane dp; DataPlaneArray dpa; @@ -49,7 +52,8 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, if(!dict) return dp; // Read array of climatology fields - dpa = read_climo_data_plane_array(dict, i_vx, vld_ut, vx_grid, desc); + dpa = read_climo_data_plane_array(dict, entry_name, i_vx, + vld_ut, vx_grid, desc); // Check for multiple matches if(dpa.n_planes() > 1) { @@ -66,82 +70,120 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, //////////////////////////////////////////////////////////////////////// -DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, +DataPlaneArray read_climo_data_plane_array(Dictionary *dict, + const char *climo_name, + int i_vx, unixtime vld_ut, const Grid &vx_grid, const char *desc) { + + const char *method_name = "read_climo_data_plane_array() -> "; + + // + // Parse each of the climatology configuration entries separately + // using the "climo_name.entry_name" scope notation. Use the value + // from the specified dictionary (e.g. "fcst.climo_mean") if found, + // or use the value from the parent dictionary (e.g. top-level config + // "climo_mean") if not found. + // DataPlaneArray dpa; - StringArray climo_files; - RegridInfo regrid_info; - InterpMthd time_interp; - GrdFileType ctype; - double day_interval, hour_interval; - int i, day_ts, hour_ts; + ConcatString cs; // Check for null if(!dict) return dpa; - // Get the i-th array entry - Dictionary i_dict = parse_conf_i_vx_dict(dict, i_vx); - - // Climatology mean and standard deviation files - climo_files = i_dict.lookup_string_array(conf_key_file_name, false); + // Parse the "file_name" array entry + cs << cs_erase << climo_name << "." << conf_key_file_name; + StringArray climo_files(dict->lookup_string_array(cs.c_str())); - // Check for at least one file + // Check for at least one input file if(climo_files.n() == 0) return dpa; - // Regrid info - regrid_info = parse_conf_regrid(&i_dict); + // Parse the "field" array entry + cs << cs_erase << climo_name << "." << conf_key_field; + Dictionary *field_dict = dict->lookup_array(cs.c_str(), false); + + // Determine which climo array entry to use + int i_climo_field = bad_data_int; + if(field_dict->n_entries() == 0) return dpa; + else if(field_dict->n_entries() == 1) i_climo_field = 0; + else i_climo_field = i_vx; + + // Parse the climo dictionary + Dictionary i_dict = parse_conf_i_vx_dict(field_dict, i_climo_field); + + // Parse the "regrid" dictionary from the top-level + // config file context (e.g. "config.climo_mean.regrid") + // to serve as the default. 
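To make the scope notation above concrete, consider the following sketch (file names are illustrative). The "fcst.climo_mean.file_name" entry overrides the top-level setting, while any entry missing from "fcst.climo_mean" is inherited from the top-level "climo_mean" dictionary, whose "regrid" setting is parsed by the code below to serve as the default:

.. code-block:: none

   climo_mean = {
      file_name = [ "climo.nc" ];
      regrid    = { method = BILIN; width = 2; }
   }

   fcst = {
      climo_mean = {
         file_name = [ "fcst_climo.nc" ];
      }
   }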
+ RegridInfo regrid_default = parse_conf_regrid( + dict->parent()->lookup_dictionary(climo_name, false)); - // Time interpolation - time_interp = int_to_interpmthd(i_dict.lookup_int(conf_key_time_interp_method)); + // Parse the "time_interp_method" + cs << cs_erase << climo_name << "." << conf_key_time_interp_method; + InterpMthd time_interp = int_to_interpmthd(dict->lookup_int(cs.c_str())); - // Day interval - day_interval = i_dict.lookup_double(conf_key_day_interval); + // Parse the "day_interval" value + cs << cs_erase << climo_name << "." << conf_key_day_interval; + double day_interval = dict->lookup_double(cs.c_str()); - // Range check day_interval + // Range check day_interval value if(!is_bad_data(day_interval) && day_interval < 1) { - mlog << Error << "\nread_climo_data_plane_array() -> " + mlog << Error << "\n" << method_name << "The " << conf_key_day_interval << " entry (" << day_interval << ") can be set to " << na_str << " or a value of at least 1.\n\n"; exit(1); } - // Hour interval - hour_interval = i_dict.lookup_double(conf_key_hour_interval); + // Parse the "hour_interval" value + cs << cs_erase << climo_name << "." << conf_key_hour_interval; + double hour_interval = dict->lookup_double(cs.c_str()); // Range check hour_interval if(!is_bad_data(hour_interval) && (hour_interval <= 0 || hour_interval > 24)) { - mlog << Error << "\nread_climo_data_plane_array() -> " + mlog << Error << "\n" << method_name << "The " << conf_key_hour_interval << " entry (" << hour_interval << ") can be set to " << na_str << " or a value between 0 and 24.\n\n"; exit(1); } - // Check if file_type was specified - ctype = parse_conf_file_type(&i_dict); + // Log search criteria + if(mlog.verbosity_level() >= 5) { + mlog << Debug(5) + << "Searching " << climo_files.n() + << " file(s) for " << desc + << " data using climo_name = " << climo_name + << ", i_vx = " << i_vx + << ", valid time = " << unix_to_yyyymmdd_hhmmss(vld_ut) + << ", time_interp = " << interpmthd_to_string(time_interp) + << ", day_interval = " << day_interval + << ", hour_interval = " << hour_interval + << "\n"; + } // Store the time steps in seconds - day_ts = (is_bad_data(day_interval) ? bad_data_int : - nint(day_interval * 24.0 * sec_per_hour)); - hour_ts = (is_bad_data(hour_interval) ? bad_data_int : - nint(hour_interval * sec_per_hour)); - + int day_ts = (is_bad_data(day_interval) ? bad_data_int : + nint(day_interval * 24.0 * sec_per_hour)); + int hour_ts = (is_bad_data(hour_interval) ? 
bad_data_int : + nint(hour_interval * sec_per_hour)); + + // Check if file_type was specified + GrdFileType ctype = parse_conf_file_type(&i_dict); + // Search the files for the requested records - for(i=0; ifile_type()); + info->set_default_regrid(regrid_default); info->set_dict(*dict); // Read data planes @@ -226,9 +269,10 @@ void read_climo_file(const char *climo_file, GrdFileType ctype, if(!(mtddf->grid() == vx_grid)) { mlog << Debug(2) << "Regridding " << clm_ut_cs << " " << desc << " field " << info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << info->regrid().get_str() << ".\n"; dp = met_regrid(clm_dpa[i], mtddf->grid(), vx_grid, - regrid_info); + info->regrid()); } else { dp = clm_dpa[i]; diff --git a/src/libcode/vx_statistics/read_climo.h b/src/libcode/vx_statistics/read_climo.h index 362efa3fce..64db97c04a 100644 --- a/src/libcode/vx_statistics/read_climo.h +++ b/src/libcode/vx_statistics/read_climo.h @@ -18,13 +18,15 @@ //////////////////////////////////////////////////////////////////////// -extern DataPlane read_climo_data_plane(Dictionary *, int, - unixtime, const Grid &, - const char *); - -extern DataPlaneArray read_climo_data_plane_array(Dictionary *, int, - unixtime, const Grid &, - const char *); +extern DataPlane read_climo_data_plane( + Dictionary *, const char *, + int, unixtime, const Grid &, + const char *); + +extern DataPlaneArray read_climo_data_plane_array( + Dictionary *, const char *, + int, unixtime, const Grid &, + const char *); //////////////////////////////////////////////////////////////////////// diff --git a/src/tools/core/ensemble_stat/ensemble_stat.cc b/src/tools/core/ensemble_stat/ensemble_stat.cc index 826b8eaf7a..8a92272d75 100644 --- a/src/tools/core/ensemble_stat/ensemble_stat.cc +++ b/src/tools/core/ensemble_stat/ensemble_stat.cc @@ -633,7 +633,8 @@ bool get_data_plane(const char *infile, GrdFileType ftype, if(do_regrid && !(mtddf->grid() == grid)) { mlog << Debug(1) << "Regridding field \"" << info->magic_str() - << "\" to the verification grid.\n"; + << "\" to the verification grid using " + << info->regrid().get_str() << ".\n"; dp = met_regrid(dp, mtddf->grid(), grid, info->regrid()); } @@ -691,7 +692,8 @@ bool get_data_plane_array(const char *infile, GrdFileType ftype, mlog << Debug(1) << "Regridding " << dpa.n_planes() << " field(s) \"" << info->magic_str() - << "\" to the verification grid.\n"; + << "\" to the verification grid using " + << info->regrid().get_str() << ".\n"; // Loop through the forecast fields for(i=0; imagic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << info->regrid().get_str() << ".\n"; dp = met_regrid(dp, mtddf->grid(), grid, info->regrid()); } diff --git a/src/tools/core/grid_stat/grid_stat_conf_info.cc b/src/tools/core/grid_stat/grid_stat_conf_info.cc index 6ec2dd8f98..19c5a48e83 100644 --- a/src/tools/core/grid_stat/grid_stat_conf_info.cc +++ b/src/tools/core/grid_stat/grid_stat_conf_info.cc @@ -211,7 +211,8 @@ void GridStatConfInfo::process_config(GrdFileType ftype, vx_opt = new GridStatVxOpt [n_vx]; // Check for consistent number of climatology fields - check_climo_n_vx(&conf, n_vx); + check_climo_n_vx(fdict, n_vx); + check_climo_n_vx(odict, n_vx); // Parse settings for each verification task for(i=0; iset_default_regrid(regrid_info); + obs_info->set_default_regrid(regrid_info); + // Set the VarInfo objects fcst_info->set_dict(fdict); obs_info->set_dict(odict); diff --git a/src/tools/core/mode/mode_exec.cc 
b/src/tools/core/mode/mode_exec.cc index 578c92acb7..2ee853f30b 100644 --- a/src/tools/core/mode/mode_exec.cc +++ b/src/tools/core/mode/mode_exec.cc @@ -292,7 +292,8 @@ void ModeExecutive::setup_traditional_fcst_obs_data() if ( !(fcst_mtddf->grid() == grid) ) { mlog << Debug(1) << "Regridding forecast " << engine.conf_info.Fcst->var_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << engine.conf_info.Fcst->var_info->regrid().get_str() << ".\n"; Fcst_sd.data = met_regrid(Fcst_sd.data, fcst_mtddf->grid(), grid, engine.conf_info.Fcst->var_info->regrid()); } @@ -302,7 +303,8 @@ void ModeExecutive::setup_traditional_fcst_obs_data() if ( !(obs_mtddf->grid() == grid) ) { mlog << Debug(1) << "Regridding observation " << engine.conf_info.Obs->var_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << engine.conf_info.Obs->var_info->regrid().get_str() << ".\n"; Obs_sd.data = met_regrid(Obs_sd.data, obs_mtddf->grid(), grid, engine.conf_info.Obs->var_info->regrid()); } @@ -454,7 +456,8 @@ void ModeExecutive::setup_multivar_fcst_data(const Grid &verification_grid, if ( !(input._grid == grid) ) { mlog << Debug(1) << "Regridding forecast " << engine.conf_info.Fcst->var_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << engine.conf_info.Fcst->var_info->regrid().get_str() << ".\n"; Fcst_sd.data = met_regrid(Fcst_sd.data, input._grid, grid, engine.conf_info.Fcst->var_info->regrid()); } @@ -520,7 +523,8 @@ void ModeExecutive::setup_multivar_obs_data(const Grid &verification_grid, if ( !(input._grid == grid) ) { mlog << Debug(1) << "Regridding observation " << engine.conf_info.Obs->var_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << engine.conf_info.Obs->var_info->regrid().get_str() << ".\n"; Obs_sd.data = met_regrid(Obs_sd.data, input._grid, grid, engine.conf_info.Obs->var_info->regrid()); } diff --git a/src/tools/core/point_stat/point_stat.cc b/src/tools/core/point_stat/point_stat.cc index bda41ddf26..9e47b3181e 100644 --- a/src/tools/core/point_stat/point_stat.cc +++ b/src/tools/core/point_stat/point_stat.cc @@ -649,7 +649,8 @@ void process_fcst_climo_files() { mlog << Debug(1) << "Regridding " << fcst_dpa.n_planes() << " forecast field(s) for " << fcst_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << fcst_info->regrid().get_str() << ".\n"; // Loop through the forecast fields for(j=0; j " << R"(when using the "-paired" command line option, the )" - << "the file list length (" << fcst_files.n() + << "file list length (" << fcst_files.n() << ") and series length (" << n_series_pair << ") must match.\n\n"; usage(); @@ -562,7 +562,8 @@ void get_series_data(int i_series, mlog << Debug(2) << "Regridding forecast " << fcst_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << fcst_info->regrid().get_str() << ".\n"; fcst_dp = met_regrid(fcst_dp, fcst_grid, grid, fcst_info->regrid()); } @@ -582,7 +583,8 @@ void get_series_data(int i_series, mlog << Debug(2) << "Regridding observation " << obs_info->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << obs_info->regrid().get_str() << ".\n"; obs_dp = met_regrid(obs_dp, obs_grid, grid, obs_info->regrid()); } @@ -859,11 +861,14 @@ void process_scores() { // Loop over the series variable for(int i_series=0; i_series 1 ? 
i_series : 0); + int i_obs = (conf_info.get_n_obs() > 1 ? i_series : 0); // Store the current VarInfo objects - fcst_info = conf_info.fcst_info[i_fcst]; + fcst_info = (conf_info.get_n_fcst() > 1 ? + conf_info.fcst_info[i_series] : + conf_info.fcst_info[0]); obs_info = (conf_info.get_n_obs() > 1 ? conf_info.obs_info[i_series] : conf_info.obs_info[0]); @@ -898,22 +903,26 @@ void process_scores() { // Read forecast climatology data fcmn_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), + conf_info.conf.lookup_dictionary(conf_key_fcst), + conf_key_climo_mean, i_fcst, fcst_dp.valid(), grid, "forecast climatology mean"); fcsd_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), + conf_info.conf.lookup_dictionary(conf_key_fcst), + conf_key_climo_stdev, i_fcst, fcst_dp.valid(), grid, "forecast climatology standard deviation"); // Read observation climatology data ocmn_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i_fcst, fcst_dp.valid(), grid, + conf_info.conf.lookup_dictionary(conf_key_obs), + conf_key_climo_mean, + i_obs, fcst_dp.valid(), grid, "observation climatology mean"); ocsd_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i_fcst, fcst_dp.valid(), grid, + conf_info.conf.lookup_dictionary(conf_key_obs), + conf_key_climo_stdev, + i_obs, fcst_dp.valid(), grid, "observation climatology standard deviation"); bool fcmn_flag = !fcmn_dp.is_empty(); diff --git a/src/tools/core/series_analysis/series_analysis_conf_info.cc b/src/tools/core/series_analysis/series_analysis_conf_info.cc index 2e032256a0..fd19bf61bc 100644 --- a/src/tools/core/series_analysis/series_analysis_conf_info.cc +++ b/src/tools/core/series_analysis/series_analysis_conf_info.cc @@ -212,8 +212,9 @@ void SeriesAnalysisConfInfo::process_config(GrdFileType ftype, exit(1); } - // Check climatology fields - check_climo_n_vx(&conf, n_fcst); + // Check for consistent number of climatology fields + check_climo_n_vx(fdict, n_fcst); + check_climo_n_vx(odict, n_obs); // Allocate space based on the number of verification tasks fcst_info = new VarInfo * [n_fcst]; diff --git a/src/tools/core/wavelet_stat/wavelet_stat.cc b/src/tools/core/wavelet_stat/wavelet_stat.cc index 27d868d78b..648d5fd9b6 100644 --- a/src/tools/core/wavelet_stat/wavelet_stat.cc +++ b/src/tools/core/wavelet_stat/wavelet_stat.cc @@ -300,7 +300,8 @@ void process_scores() { if(!(fcst_mtddf->grid() == grid)) { mlog << Debug(1) << "Regridding forecast " << conf_info.fcst_info[i]->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << conf_info.fcst_info[i]->regrid().get_str() << ".\n"; fcst_dp = met_regrid(fcst_dp, fcst_mtddf->grid(), grid, conf_info.fcst_info[i]->regrid()); } @@ -326,7 +327,8 @@ void process_scores() { if(!(obs_mtddf->grid() == grid)) { mlog << Debug(1) << "Regridding observation " << conf_info.obs_info[i]->magic_str() - << " to the verification grid.\n"; + << " to the verification grid using " + << conf_info.obs_info[i]->regrid().get_str() << ".\n"; obs_dp = met_regrid(obs_dp, obs_mtddf->grid(), grid, conf_info.obs_info[i]->regrid()); } diff --git a/src/tools/other/gen_ens_prod/gen_ens_prod.cc b/src/tools/other/gen_ens_prod/gen_ens_prod.cc index b088c74dcf..9f36c55ad3 100644 --- a/src/tools/other/gen_ens_prod/gen_ens_prod.cc +++ b/src/tools/other/gen_ens_prod/gen_ens_prod.cc @@ -468,18 +468,20 @@ void 
get_climo_mean_stdev(GenEnsProdVarInfo *ens_info, int i_var, << ens_info->get_var_info(i_ens)->magic_str() << "\".\n"; cmn_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_climo_mean_field, false), + conf_info.conf.lookup_dictionary(conf_key_ens), + conf_key_climo_mean, i_var, ens_valid_ut, grid, - "climatology mean"); + "ensemble climatology mean"); mlog << Debug(4) << "Reading climatology standard deviation data for ensemble field \"" << ens_info->get_var_info(i_ens)->magic_str() << "\".\n"; csd_dp = read_climo_data_plane( - conf_info.conf.lookup_array(conf_key_climo_stdev_field, false), + conf_info.conf.lookup_dictionary(conf_key_ens), + conf_key_climo_stdev, i_var, ens_valid_ut, grid, - "climatology standard deviation"); + "ensemble climatology standard deviation"); // Unset the MET_ENS_MEMBER_ID environment variable if(set_ens_mem_id) { @@ -647,7 +649,8 @@ bool get_data_plane(const char *infile, GrdFileType ftype, if(!(mtddf->grid() == grid)) { mlog << Debug(1) << "Regridding field \"" << info->magic_str() - << "\" to the verification grid.\n"; + << "\" to the verification grid using " + << info->regrid().get_str() << ".\n"; dp = met_regrid(dp, mtddf->grid(), grid, info->regrid()); } diff --git a/src/tools/other/grid_diag/grid_diag.cc b/src/tools/other/grid_diag/grid_diag.cc index bb444f2061..cd5ddc843b 100644 --- a/src/tools/other/grid_diag/grid_diag.cc +++ b/src/tools/other/grid_diag/grid_diag.cc @@ -291,7 +291,8 @@ void process_series(void) { if(!(cur_grid == grid)) { mlog << Debug(2) << "Regridding field " << data_info->magic_str_attr() - << " to the verification grid.\n"; + << " to the verification grid using " + << data_info->regrid().get_str() << ".\n"; data_dp[i_var] = met_regrid(data_dp[i_var], cur_grid, grid, data_info->regrid()); diff --git a/src/tools/other/plot_point_obs/plot_point_obs_conf_info.cc b/src/tools/other/plot_point_obs/plot_point_obs_conf_info.cc index 23311d2e3c..4a06fd3e89 100644 --- a/src/tools/other/plot_point_obs/plot_point_obs_conf_info.cc +++ b/src/tools/other/plot_point_obs/plot_point_obs_conf_info.cc @@ -486,7 +486,8 @@ void PlotPointObsConfInfo::process_config( // Regrid, if requested if(grid_data_info->regrid().enable) { mlog << Debug(1) << "Regridding field " - << grid_data_info->magic_str() << ".\n"; + << grid_data_info->magic_str() << " using " + << grid_data_info->regrid().get_str() << ".\n"; Grid to_grid(parse_vx_grid(grid_data_info->regrid(), &grid, &grid)); grid_data = met_regrid(grid_data, grid, to_grid, diff --git a/src/tools/tc_utils/tc_diag/tc_diag.cc b/src/tools/tc_utils/tc_diag/tc_diag.cc index 0b551fd1f2..c4b76a7cdd 100644 --- a/src/tools/tc_utils/tc_diag/tc_diag.cc +++ b/src/tools/tc_utils/tc_diag/tc_diag.cc @@ -2286,9 +2286,8 @@ void TmpFileInfo::write_nc_data(const VarInfo *vi, const DataPlane &dp_in, RegridInfo ri = vi->regrid(); mlog << Debug(4) << "Regridding \"" << vi->magic_str() - << "\" to the \"" << domain << "\" domain using the " - << interpmthd_to_string(ri.method) << "(" << ri.width - << ") interpolation method.\n"; + << "\" to the \"" << domain << "\" domain using " + << ri.get_str() << ".\n"; // Do the cylindrical coordinate transformation if(dp_in.nxy() > 0) {
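For reference, the reworked message above, like the other regridding log messages updated throughout this changeset, reports the interpolation method and width via RegridInfo::get_str(). A representative log line (illustrative values, not captured from a real run) would be:

.. code-block:: none

   DEBUG 4: Regridding "TMP/P500" to the "parent" domain using BILIN(2).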