diff --git a/.config_files.xml b/.config_files.xml
index d1a592e5db..7f0d6bb11a 100644
--- a/.config_files.xml
+++ b/.config_files.xml
@@ -19,11 +19,8 @@
 $SRCROOT
 $SRCROOT/components/slim/
-$SRCROOT/components/cpl7/components/data_comps_$COMP_INTERFACE/dlnd
-$SRCROOT/components/cpl7/components/stub_comps_$COMP_INTERFACE/slnd
-$SRCROOT/components/cpl7/components/xcpl_comps_$COMP_INTERFACE/xlnd
+$SRCROOT/components/cdeps/dlnd
+$CIMEROOT/CIME/non_py/src/components/stub_comps_$COMP_INTERFACE/slnd
+$CIMEROOT/CIME/non_py/src/components/xcpl_comps_$COMP_INTERFACE/xlnd
 case_comps
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 3035923261..ccb310a4f9 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -2,6 +2,7 @@
b429b63824e09f82e95d2982f14311cbbd8e4a37
d229b5c6689efc4c2a6cef077515c4ccd5c18ff6
4cd83cb3ee6d85eb909403487abf5eeaf4d98911
+d229b5c6689efc4c2a6cef077515c4ccd5c18ff6
0aa2957c1f8603c63fa30b11295c06cfddff44a5
2cdb380febb274478e84cd90945aee93f29fa2e6
e44dc469439e02e9ee582dab274d890ebdfab104
@@ -24,6 +25,7 @@ a9d96219902cf609636886c7073a84407f450d9a
d866510188d26d51bcd6d37239283db690af7e82
0dcd0a3c1abcaffe5529f8d79a6bc34734b195c7
e096358c832ab292ddfd22dd5878826c7c788968
+475831f0fb0e31e97f630eac4e078c886558b61c
# Ran SystemTests and python/ctsm through black python formatter
5364ad66eaceb55dde2d3d598fe4ce37ac83a93c
8056ae649c1b37f5e10aaaac79005d6e3a8b2380
@@ -34,6 +36,7 @@ e096358c832ab292ddfd22dd5878826c7c788968
6fccf682eaf718615407d9bacdd3903b8786a03d
2500534eb0a83cc3aff94b30fb62e915054030bf
78d05967c2b027dc9776a884716597db6ef7f57c
+47839a77229c61555e3b8932927bb54cdc511b27
a0d014fae9550dd9ffbc934abd29ef16176f8208
c7b7ca1d94ac19abb9ecea9fb5b712ddbdd6645d
b565b55ce7a9f8d812a573d716a5fd3d78cfea81
@@ -44,3 +47,5 @@ aa04d1f7d86cc2503b98b7e2b2d84dbfff6c316b
6c6f57e948bfa31e60b383536cc21663fedb8b70
9660667b1267dcd4150889f5f39db540158be74a
665cf86102e09b4c4c5a140700676dca23bc55a9
+1a49e547ba3c48fa483f9ae81a8f05adcd6b888c
+045d90f1d80f713eb3ae0ac58f6c2352937f1eb0
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 4c8f6ca499..c4a381383b 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -10,11 +10,13 @@ Are answers expected to change (and if so in what way)?
Any User Interface Changes (namelist or namelist defaults changes)?
+Does this create a need to change or add documentation? Did you do so?
+
Testing performed, if any:
(List what testing you did to show your changes worked as expected)
(This can be manual testing or running of the different test suites)
(Documentation on system testing is here: https://github.com/ESCOMP/ctsm/wiki/System-Testing-Guide)
-(aux_clm on cheyenne for intel/gnu and izumi for intel/gnu/nag/pgi is the standard for tags on master)
+(aux_clm on derecho for intel/gnu and izumi for intel/gnu/nag/nvhpc is the standard for tags on master)
**NOTE: Be sure to check your coding style against the standard
(https://github.com/ESCOMP/ctsm/wiki/CTSM-coding-guidelines) and review
diff --git a/.gitignore b/.gitignore
index 1da8072fed..27823e7f54 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,18 +1,3 @@
-# directories checked out by manage_externals, and other files created
-# by manage_externals
-manage_externals.log
-ccs_config
-/src/fates/
-/cime/
-/components/
-/libraries/
-/share/
-/doc/doc-builder/
-
-# ignore svn directories
-**/.svn/**
-.svn/
-
# netcdf files
*.nc
# but don't ignore netcdf files here:
@@ -75,7 +60,6 @@ buildnmlc
td.*.status
td.*.log
td.*.status.xFail
-test_driver_*.sh
# mksurfdata output
surfdata_*.log
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000..19bc024208
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,130 @@
+# This is a git submodule file with additional support for
+# git-fleximod (https://github.com/ESMCI/git-fleximod)
+#
+# The additional flags supported by git-fleximod are
+# fxtag - the tag associated with the submodule; this tag can be tested for
+#         consistency with the submodule hash using git-fleximod status,
+#         and the hash can be updated to the tag using git-fleximod update
+#
+# fxrequired - indicates if a given submodule should be checked out on install
+#              submodules can be toplevel or internal and required or optional
+#              toplevel means that the submodule should only be checked out if the
+#              module is the toplevel of the git repo (is not a submodule itself)
+#              internal means that the submodule is needed by the component whether
+#              the component is toplevel or the submodule of another repo
+#              required means that the submodule should always be checked out
+#              optional means that the submodule should only be checked out if the
+#              optional flag is provided to git-fleximod or the submodule name is
+#              explicitly listed on the git-fleximod command line
+#
+# fxsparse - a path to a git sparse checkout file indicating that the
+#            submodule should be checked out in sparse mode
+#
+# fxDONOTUSEurl - this field is used by git-fleximod test to ensure that the url
+#                 points to the official repository and not to an unofficial fork.
+#                 It is intended for use by GitHub workflows that test commits to
+#                 protected repository branches.
+#
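+# Example usage (illustrative; see the git-fleximod README for details):
+#   git fleximod status            # report submodule state against fxtag
+#   git fleximod update            # check out submodules at their fxtag
+#   git fleximod update --optional # also check out optional submodules
+#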
+[submodule "fates"]
+path = src/fates
+url = https://github.com/NGEET/fates
+fxtag = sci.1.77.1_api.36.0.0
+fxrequired = AlwaysRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/NCAR/fates-release
+
+[submodule "cism"]
+path = components/cism
+url = https://github.com/ESCOMP/CISM-wrapper
+fxtag = cismwrap_2_2_002
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/CISM-wrapper
+
+[submodule "rtm"]
+path = components/rtm
+url = https://github.com/ESCOMP/RTM
+fxtag = rtm1_0_80
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/RTM
+
+[submodule "mosart"]
+path = components/mosart
+url = https://github.com/ESCOMP/MOSART
+fxtag = mosart1.1.02
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/MOSART
+
+[submodule "mizuRoute"]
+path = components/mizuRoute
+url = https://github.com/ESCOMP/mizuRoute
+fxtag = cesm-coupling.n02_v2.1.2
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/mizuRoute
+
+[submodule "ccs_config"]
+path = ccs_config
+url = https://github.com/ESMCI/ccs_config_cesm.git
+fxtag = ccs_config_cesm1.0.0
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESMCI/ccs_config_cesm.git
+
+[submodule "cime"]
+path = cime
+url = https://github.com/ESMCI/cime
+fxtag = cime6.0.246
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESMCI/cime
+
+[submodule "cmeps"]
+path = components/cmeps
+url = https://github.com/ESCOMP/CMEPS.git
+fxtag = cmeps0.14.77
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/CMEPS.git
+
+[submodule "cdeps"]
+path = components/cdeps
+url = https://github.com/ESCOMP/CDEPS.git
+fxtag = cdeps1.0.34
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/CDEPS.git
+
+[submodule "share"]
+path = share
+url = https://github.com/ESCOMP/CESM_share
+fxtag = share1.0.19
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESCOMP/CESM_share
+
+[submodule "mct"]
+path = libraries/mct
+url = https://github.com/MCSclimate/MCT
+fxtag = MCT_2.11.0
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/MCSclimate/MCT
+
+[submodule "parallelio"]
+path = libraries/parallelio
+url = https://github.com/NCAR/ParallelIO
+fxtag = pio2_6_2
+fxrequired = ToplevelRequired
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/NCAR/ParallelIO
+
+[submodule "doc-builder"]
+path = doc/doc-builder
+url = https://github.com/ESMCI/doc-builder
+fxtag = v1.0.8
+fxrequired = ToplevelOptional
+# Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed
+fxDONOTUSEurl = https://github.com/ESMCI/doc-builder
diff --git a/.lib/git-fleximod/.github/workflows/pre-commit b/.lib/git-fleximod/.github/workflows/pre-commit
new file mode 100644
index 0000000000..1a6ad0082a
--- /dev/null
+++ b/.lib/git-fleximod/.github/workflows/pre-commit
@@ -0,0 +1,13 @@
+name: pre-commit
+on:
+ pull_request:
+ push:
+ branches: [main]
+
+jobs:
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v3
+ - uses: pre-commit/action@v3.0.0
diff --git a/.lib/git-fleximod/.github/workflows/pytest.yaml b/.lib/git-fleximod/.github/workflows/pytest.yaml
new file mode 100644
index 0000000000..0868dd9a33
--- /dev/null
+++ b/.lib/git-fleximod/.github/workflows/pytest.yaml
@@ -0,0 +1,77 @@
+# Run this job on pushes to `main`, and for pull requests. If you don't specify
+# `branches: [main]`, then this action runs _twice_ on pull requests, which is
+# annoying.
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+      # If you wanted to use multiple Python versions, you'd have to specify a matrix
+      # in the job and reference the matrix Python version here.
+ - uses: actions/setup-python@v5
+ with:
+ python-version: '3.9'
+
+      # Cache the installation of Poetry itself, i.e. the next step. This prevents the workflow
+      # from installing Poetry every time, which can be slow. Note the use of the Poetry version
+      # number in the cache key: this allows you to invalidate the cache manually if/when you
+      # want to upgrade Poetry, or if something goes wrong. This could be mildly cleaner by
+      # using an environment variable, but I don't really care.
+ - name: cache poetry install
+ uses: actions/cache@v4
+ with:
+ path: ~/.local
+ key: poetry-1.7.1
+
+ # Install Poetry. You could do this manually, or there are several actions that do this.
+ # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to
+ # Poetry's default install script, which feels correct. I pin the Poetry version here
+ # because Poetry does occasionally change APIs between versions and I don't want my
+ # actions to break if it does.
+ #
+ # The key configuration value here is `virtualenvs-in-project: true`: this creates the
+ # venv as a `.venv` in your testing directory, which allows the next step to easily
+ # cache it.
+ - uses: snok/install-poetry@v1
+ with:
+ version: 1.7.1
+ virtualenvs-create: true
+ virtualenvs-in-project: true
+
+ # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache
+ # key: if you're using multiple Python versions, or multiple OSes, you'd need to include
+ # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock.
+ - name: cache deps
+ id: cache-deps
+ uses: actions/cache@v4
+ with:
+ path: .venv
+ key: pydeps-${{ hashFiles('**/poetry.lock') }}
+
+ # Install dependencies. `--no-root` means "install all dependencies but not the project
+ # itself", which is what you want to avoid caching _your_ code. The `if` statement
+ # ensures this only runs on a cache miss.
+ - run: poetry install --no-interaction --no-root
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+
+      # Now install _your_ project. This isn't necessary for many types of projects -- particularly
+      # things like Django apps don't need this. But it's a good idea since it fully exercises the
+      # pyproject.toml and makes sure that if you add things like console-scripts at some point,
+      # they'll be installed and working.
+ - run: poetry install --no-interaction
+
+ # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml`
+ # so this line is super-simple. But it could be as complex as you need.
+ - run: |
+ git config --global user.name "${GITHUB_ACTOR}"
+ git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
+ poetry run pytest
+
diff --git a/.lib/git-fleximod/.pre-commit-config.yaml b/.lib/git-fleximod/.pre-commit-config.yaml
new file mode 100644
index 0000000000..2f6089da72
--- /dev/null
+++ b/.lib/git-fleximod/.pre-commit-config.yaml
@@ -0,0 +1,18 @@
+exclude: ^utils/.*$
+
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+ - repo: https://github.com/psf/black
+ rev: 22.3.0
+ hooks:
+ - id: black
+ - repo: https://github.com/PyCQA/pylint
+ rev: v2.11.1
+ hooks:
+ - id: pylint
+ args:
+ - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member
diff --git a/.lib/git-fleximod/License b/.lib/git-fleximod/License
new file mode 100644
index 0000000000..88bc22515e
--- /dev/null
+++ b/.lib/git-fleximod/License
@@ -0,0 +1,20 @@
+Copyright 2024 NSF National Center for Atmospheric Research (NCAR)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+“Software”), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.lib/git-fleximod/README.md b/.lib/git-fleximod/README.md
new file mode 100644
index 0000000000..53917da400
--- /dev/null
+++ b/.lib/git-fleximod/README.md
@@ -0,0 +1,108 @@
+# git-fleximod
+
+Flexible, Enhanced Submodule Management for Git
+
+## Overview
+
+Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way.
+
+## Installation
+
+    If you place git-fleximod somewhere in your PATH, you can invoke it as: git fleximod
+
+## Usage
+
+ Basic Usage:
+ git fleximod [options]
+ Available Commands:
+ status: Display the status of submodules.
+ update: Update submodules to the tag indicated in .gitmodules variable fxtag.
+    test: Make sure that fxtags and submodule hashes are consistent,
+          that official urls (as defined by fxDONOTUSEurl) are set, and
+          that fxtags are defined for all submodules.
+ Additional Options:
+ See git fleximod --help for more details.
+
+## Supported .gitmodules Variables
+
+ fxtag: Specify a specific tag or branch to checkout for a submodule.
+ fxrequired: Mark a submodule's checkout behavior, with allowed values:
+ - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module).
+ - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module).
+ - AlwaysRequired: Always required (always checked out).
+ - AlwaysOptional: Always optional (checked out with --optional flag).
+ fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths.
+    fxDONOTUSEurl: This is the url used in the test subcommand to ensure that protected branches do not point to forks.
+    **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be
+    changed by users. Use the url variable to change to a fork if desired.
+
+## Sparse Checkouts
+
+ To enable sparse checkout for a submodule, set the fxsparse variable
+ in the .gitmodules file to the path of a file containing the desired
+ sparse checkout paths. Git-fleximod will automatically configure
+ sparse checkout based on this file when applicable commands are run.
+ See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set)
+ for details on the format of this file.
+
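+    For reference, a sparse checkout file is a plain list of
+    gitignore-style patterns (git's "full pattern set"), one per line. A
+    minimal hypothetical example (the paths are illustrative, not taken
+    from any real submodule):
+
+```
+/src/
+/include/
+!/src/tests/
+```
+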
+## Tests
+
+    The git fleximod test action is designed to be used by, for example, GitHub workflows
+    to ensure that protected branches are consistent with respect to submodule hashes and fleximod fxtags.
+
+## Examples
+
+Here are some common usage examples:
+
+Update all submodules, including optional ones:
+```bash
+ git fleximod update --optional
+```
+
+Update a specific submodule to the fxtag indicated in .gitmodules:
+
+```bash
+ git fleximod update submodule-name
+```
+Example .gitmodules entry:
+```ini
+ [submodule "cosp2"]
+ path = src/physics/cosp2/src
+ url = https://github.com/CFMIP/COSPv2.0
+ fxsparse = ../.cosp_sparse_checkout
+ fxrequired = AlwaysRequired
+ fxtag = v2.1.4cesm
+```
+Explanation:
+
+This entry indicates that the submodule named cosp2 at tag v2.1.4cesm
+should be checked out into the directory src/physics/cosp2/src,
+relative to the directory containing the .gitmodules file, from the URL
+https://github.com/CFMIP/COSPv2.0. The checkout uses the sparse paths
+listed in the file ../.cosp_sparse_checkout (relative to the submodule
+path), and it happens whenever this .gitmodules entry is read.
+
+Additional example:
+```ini
+ [submodule "cime"]
+ path = cime
+ url = https://github.com/jedwards4b/cime
+ fxrequired = ToplevelRequired
+ fxtag = cime6.0.198_rme01
+```
+
+Explanation:
+
+This entry indicates that the submodule cime should be checked out
+into a directory named cime at tag cime6.0.198_rme01 from the URL
+https://github.com/jedwards4b/cime. This should only be done if
+the .gitmodules file is at the top level of the repository clone.
+
+## Contributing
+
+We welcome contributions! Please see the CONTRIBUTING.md file for guidelines.
+
+## License
+
+Git-fleximod is released under the MIT License.
diff --git a/manage_externals/test/doc/Makefile b/.lib/git-fleximod/doc/Makefile
similarity index 75%
rename from manage_externals/test/doc/Makefile
rename to .lib/git-fleximod/doc/Makefile
index 18f4d5bf99..d4bb2cbb9e 100644
--- a/manage_externals/test/doc/Makefile
+++ b/.lib/git-fleximod/doc/Makefile
@@ -1,10 +1,10 @@
# Minimal makefile for Sphinx documentation
#
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-SPHINXPROJ = ManageExternals
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
@@ -17,4 +17,4 @@ help:
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/.lib/git-fleximod/doc/conf.py b/.lib/git-fleximod/doc/conf.py
new file mode 100644
index 0000000000..423099eec9
--- /dev/null
+++ b/.lib/git-fleximod/doc/conf.py
@@ -0,0 +1,26 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "git-fleximod"
+author = "Jim Edwards "
+release = "0.4.0"
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["sphinx_argparse_cli"]
+
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "alabaster"
+html_static_path = ["_static"]
diff --git a/.lib/git-fleximod/doc/index.rst b/.lib/git-fleximod/doc/index.rst
new file mode 100644
index 0000000000..0f9c1a7f7e
--- /dev/null
+++ b/.lib/git-fleximod/doc/index.rst
@@ -0,0 +1,24 @@
+.. git-fleximod documentation master file, created by
+ sphinx-quickstart on Sat Feb 3 12:02:22 2024.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to git-fleximod's documentation!
+========================================
+
+.. toctree::
+ :maxdepth: 2
+   :caption: Contents:
+
+.. module:: sphinxcontrib.autoprogram
+.. sphinx_argparse_cli::
+ :module: git_fleximod.cli
+ :func: get_parser
+ :prog: git-fleximod
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/.lib/git-fleximod/doc/make.bat b/.lib/git-fleximod/doc/make.bat
new file mode 100644
index 0000000000..32bb24529f
--- /dev/null
+++ b/.lib/git-fleximod/doc/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/.lib/git-fleximod/escomp_install b/.lib/git-fleximod/escomp_install
new file mode 100644
index 0000000000..ae782e72a4
--- /dev/null
+++ b/.lib/git-fleximod/escomp_install
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# updates git-fleximod in an ESCOMP model
+# this script should be run from the model root directory, it expects
+# git-fleximod to already be installed with the script in bin
+# and the classes in lib/python/site-packages
+import sys
+import shutil
+import os
+
+from glob import iglob
+
+fleximod_root = sys.argv[1]
+fleximod_path = os.path.join(fleximod_root,"src","git-fleximod")
+if os.path.isfile(fleximod_path):
+ with open(fleximod_path,"r") as f:
+ fleximod = f.readlines()
+ with open(os.path.join(".","bin","git-fleximod"),"w") as f:
+ for line in fleximod:
+ f.write(line)
+ if "import argparse" in line:
+ f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n')
+
+ for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")):
+ shutil.copy(file,
+ os.path.join("lib","python","site-packages","fleximod",os.path.basename(file)))
diff --git a/manage_externals/test/repos/simple-ext.svn/db/rep-cache.db-journal b/.lib/git-fleximod/git_fleximod/__init__.py
similarity index 100%
rename from manage_externals/test/repos/simple-ext.svn/db/rep-cache.db-journal
rename to .lib/git-fleximod/git_fleximod/__init__.py
diff --git a/.lib/git-fleximod/git_fleximod/cli.py b/.lib/git-fleximod/git_fleximod/cli.py
new file mode 100644
index 0000000000..a15a226de4
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/cli.py
@@ -0,0 +1,129 @@
+from pathlib import Path
+import argparse
+from git_fleximod import utils
+
+__version__ = "0.7.8"
+
+def find_root_dir(filename=".gitmodules"):
+ """ finds the highest directory in tree
+ which contains a file called filename """
+ d = Path.cwd()
+ root = Path(d.root)
+ dirlist = []
+ dl = d
+ while dl != root:
+ dirlist.append(dl)
+ dl = dl.parent
+ dirlist.append(root)
+ dirlist.reverse()
+
+ for dl in dirlist:
+ attempt = dl / filename
+ if attempt.is_file():
+ return str(dl)
+ return None
+
+
+def get_parser():
+ description = """
+    %(prog)s manages checking out groups of git submodules with additional support for Earth System Models
+ """
+ parser = argparse.ArgumentParser(
+ description=description, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ #
+ # user options
+ #
+ choices = ["update", "status", "test"]
+ parser.add_argument(
+ "action",
+ choices=choices,
+ default="update",
+ help=f"Subcommand of git-fleximod, choices are {choices[:-1]}",
+ )
+
+ parser.add_argument(
+ "components",
+ nargs="*",
+ help="Specific component(s) to checkout. By default, "
+ "all required submodules are checked out.",
+ )
+
+ parser.add_argument(
+ "-C",
+ "--path",
+ default=find_root_dir(),
+ help="Toplevel repository directory. Defaults to top git directory relative to current.",
+ )
+
+ parser.add_argument(
+ "-g",
+ "--gitmodules",
+ nargs="?",
+ default=".gitmodules",
+ help="The submodule description filename. " "Default: %(default)s.",
+ )
+
+ parser.add_argument(
+ "-x",
+ "--exclude",
+ nargs="*",
+ help="Component(s) listed in the gitmodules file which should be ignored.",
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ action="store_true",
+ default=False,
+ help="Override cautions and update or checkout over locally modified repository.",
+ )
+
+ parser.add_argument(
+ "-o",
+ "--optional",
+ action="store_true",
+ default=False,
+ help="By default only the required submodules "
+ "are checked out. This flag will also checkout the "
+ "optional submodules relative to the toplevel directory.",
+ )
+
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="count",
+ default=0,
+ help="Output additional information to "
+ "the screen and log file. This flag can be "
+ "used up to two times, increasing the "
+ "verbosity level each time.",
+ )
+
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=f"%(prog)s {__version__}",
+ help="Print version and exit.",
+ )
+
+ #
+ # developer options
+ #
+ parser.add_argument(
+ "--backtrace",
+ action="store_true",
+ help="DEVELOPER: show exception backtraces as extra " "debugging output",
+ )
+
+ parser.add_argument(
+ "-d",
+ "--debug",
+ action="store_true",
+ default=False,
+ help="DEVELOPER: output additional debugging "
+ "information to the screen and log file.",
+ )
+
+ return parser
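+
+
+# A minimal sketch of how the parser is meant to be driven (illustrative
+# only, not part of the module's API):
+#
+#     parser = get_parser()
+#     options = parser.parse_args(["update", "--optional", "-v"])
+#     assert options.action == "update" and options.optional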
diff --git a/.lib/git-fleximod/git_fleximod/git_fleximod.py b/.lib/git-fleximod/git_fleximod/git_fleximod.py
new file mode 100755
index 0000000000..e1b8f484a5
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/git_fleximod.py
@@ -0,0 +1,624 @@
+#!/usr/bin/env python
+import sys
+
+MIN_PYTHON = (3, 7)
+if sys.version_info < MIN_PYTHON:
+ sys.exit("Python %s.%s or later is required." % MIN_PYTHON)
+
+import os
+import shutil
+import logging
+import textwrap
+from git_fleximod import utils
+from git_fleximod import cli
+from git_fleximod.gitinterface import GitInterface
+from git_fleximod.gitmodules import GitModules
+from configparser import NoOptionError
+
+# logger variable is global
+logger = None
+
+
+def fxrequired_allowed_values():
+ return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"]
+
+
+def commandline_arguments(args=None):
+ parser = cli.get_parser()
+
+ if args:
+ options = parser.parse_args(args)
+ else:
+ options = parser.parse_args()
+
+ # explicitly listing a component overrides the optional flag
+ if options.optional or options.components:
+ fxrequired = [
+ "ToplevelRequired",
+ "ToplevelOptional",
+ "AlwaysRequired",
+ "AlwaysOptional",
+ ]
+ else:
+ fxrequired = ["ToplevelRequired", "AlwaysRequired"]
+
+ action = options.action
+ if not action:
+ action = "update"
+ handlers = [logging.StreamHandler()]
+
+ if options.debug:
+ try:
+ open("fleximod.log", "w")
+ except PermissionError:
+ sys.exit("ABORT: Could not write file fleximod.log")
+ level = logging.DEBUG
+ handlers.append(logging.FileHandler("fleximod.log"))
+ elif options.verbose:
+ level = logging.INFO
+ else:
+ level = logging.WARNING
+ # Configure the root logger
+ logging.basicConfig(
+ level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers
+ )
+
+ if hasattr(options, "version"):
+ exit()
+
+ return (
+ options.path,
+ options.gitmodules,
+ fxrequired,
+ options.components,
+ options.exclude,
+ options.force,
+ action,
+ )
+
+
+def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"):
+ """
+    This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout file
+    in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again.
+    Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important
+    because the submodule may have a large number of files, and checking out the entire submodule only to remove it again would waste
+    both time and disk space.
+
+ Parameters:
+ root_dir (str): The root directory for the git operation.
+ name (str): The name of the submodule.
+ url (str): The URL of the submodule.
+ path (str): The path to the submodule.
+ sparsefile (str): The sparse file for the submodule.
+ tag (str, optional): The tag to checkout. Defaults to "master".
+
+ Returns:
+ None
+ """
+ logger.info("Called sparse_checkout for {}".format(name))
+ rgit = GitInterface(root_dir, logger)
+ superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree")
+ if superroot:
+ gitroot = superroot.strip()
+ else:
+ gitroot = root_dir.strip()
+ assert os.path.isdir(os.path.join(gitroot, ".git"))
+ # first create the module directory
+ if not os.path.isdir(os.path.join(root_dir, path)):
+ os.makedirs(os.path.join(root_dir, path))
+
+ # initialize a new git repo and set the sparse checkout flag
+ sprep_repo = os.path.join(root_dir, path)
+ sprepo_git = GitInterface(sprep_repo, logger)
+ if os.path.exists(os.path.join(sprep_repo, ".git")):
+ try:
+ logger.info("Submodule {} found".format(name))
+ chk = sprepo_git.config_get_value("core", "sparseCheckout")
+ if chk == "true":
+ logger.info("Sparse submodule {} already checked out".format(name))
+ return
+ except NoOptionError:
+ logger.debug("Sparse submodule {} not present".format(name))
+ except Exception as e:
+            utils.fatal_error("Unexpected error {} occurred.".format(e))
+
+ sprepo_git.config_set_value("core", "sparseCheckout", "true")
+
+ # set the repository remote
+
+ logger.info("Setting remote origin in {}/{}".format(root_dir, path))
+ status = sprepo_git.git_operation("remote", "-v")
+ if url not in status:
+ sprepo_git.git_operation("remote", "add", "origin", url)
+
+ topgit = os.path.join(gitroot, ".git")
+
+ if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")):
+ with open(os.path.join(root_dir, ".git")) as f:
+ gitpath = os.path.relpath(
+ os.path.join(root_dir, f.read().split()[1]),
+ start=os.path.join(root_dir, path),
+ )
+ topgit = os.path.join(gitpath, "modules")
+ else:
+ topgit = os.path.relpath(
+ os.path.join(root_dir, ".git", "modules"),
+ start=os.path.join(root_dir, path),
+ )
+
+ with utils.pushd(sprep_repo):
+ if not os.path.isdir(topgit):
+ os.makedirs(topgit)
+ topgit += os.sep + name
+
+ if os.path.isdir(os.path.join(root_dir, path, ".git")):
+ with utils.pushd(sprep_repo):
+ shutil.move(".git", topgit)
+ with open(".git", "w") as f:
+ f.write("gitdir: " + os.path.relpath(topgit))
+ # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo)))
+ gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout"))
+ if os.path.isfile(gitsparse):
+ logger.warning(
+ "submodule {} is already initialized {}".format(name, topgit)
+ )
+ return
+
+ with utils.pushd(sprep_repo):
+ shutil.copy(sparsefile, gitsparse)
+
+ # Finally checkout the repo
+ sprepo_git.git_operation("fetch", "origin", "--tags")
+ sprepo_git.git_operation("checkout", tag)
+
+ print(f"Successfully checked out {name:>20} at {tag}")
+ rgit.config_set_value(f'submodule "{name}"', "active", "true")
+ rgit.config_set_value(f'submodule "{name}"', "url", url)
+
+
+def single_submodule_checkout(
+ root, name, path, url=None, tag=None, force=False, optional=False
+):
+ """
+ This function checks out a single git submodule.
+
+ Parameters:
+ root (str): The root directory for the git operation.
+ name (str): The name of the submodule.
+ path (str): The path to the submodule.
+ url (str, optional): The URL of the submodule. Defaults to None.
+ tag (str, optional): The tag to checkout. Defaults to None.
+ force (bool, optional): If set to True, forces the checkout operation. Defaults to False.
+ optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False.
+
+ Returns:
+ None
+ """
+ # function implementation...
+ git = GitInterface(root, logger)
+ repodir = os.path.join(root, path)
+ logger.info("Checkout {} into {}/{}".format(name, root, path))
+ # if url is provided update to the new url
+ tmpurl = None
+ repo_exists = False
+ if os.path.exists(os.path.join(repodir, ".git")):
+ logger.info("Submodule {} already checked out".format(name))
+ repo_exists = True
+    # Look for a .gitmodules file in the newly checked-out repo
+ if not repo_exists and url:
+        # ssh urls cause problems for those who don't have git accounts with ssh keys defined,
+        # but cime has one since e3sm prefers ssh to https. Because the .gitmodules file was
+        # opened with a GitModules object, we don't need to worry about restoring the file here;
+        # that will be done by the GitModules class.
+ if url.startswith("git@"):
+ tmpurl = url
+ url = url.replace("git@github.com:", "https://github.com/")
+ git.git_operation("clone", url, path)
+ smgit = GitInterface(repodir, logger)
+ if not tag:
+ tag = smgit.git_operation("describe", "--tags", "--always").rstrip()
+ smgit.git_operation("checkout", tag)
+ # Now need to move the .git dir to the submodule location
+ rootdotgit = os.path.join(root, ".git")
+ if os.path.isfile(rootdotgit):
+ with open(rootdotgit) as f:
+ line = f.readline()
+ if line.startswith("gitdir: "):
+ rootdotgit = line[8:].rstrip()
+
+ newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name))
+ if os.path.exists(newpath):
+ shutil.rmtree(os.path.join(repodir, ".git"))
+ else:
+ shutil.move(os.path.join(repodir, ".git"), newpath)
+
+ with open(os.path.join(repodir, ".git"), "w") as f:
+ f.write("gitdir: " + os.path.relpath(newpath, start=repodir))
+
+ if not os.path.exists(repodir):
+ parent = os.path.dirname(repodir)
+ if not os.path.isdir(parent):
+ os.makedirs(parent)
+ git.git_operation("submodule", "add", "--name", name, "--", url, path)
+
+ if not repo_exists or not tmpurl:
+ git.git_operation("submodule", "update", "--init", "--", path)
+
+ if os.path.exists(os.path.join(repodir, ".gitmodules")):
+ # recursively handle this checkout
+ print(f"Recursively checking out submodules of {name}")
+ gitmodules = GitModules(logger, confpath=repodir)
+ requiredlist = ["AlwaysRequired"]
+ if optional:
+ requiredlist.append("AlwaysOptional")
+ submodules_checkout(gitmodules, repodir, requiredlist, force=force)
+ if not os.path.exists(os.path.join(repodir, ".git")):
+ utils.fatal_error(
+ f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}"
+ )
+
+ if tmpurl:
+ print(git.git_operation("restore", ".gitmodules"))
+
+ return
+
+def add_remote(git, url):
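+    """Return the name of a remote that fetches from the given url,
+    adding a new uniquely named remote (newremote.NN) if none exists."""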
+ remotes = git.git_operation("remote", "-v")
+ newremote = "newremote.00"
+ if url in remotes:
+        for line in remotes.splitlines():
+ if url in line and "fetch" in line:
+ newremote = line.split()[0]
+ break
+ else:
+ i = 0
+ while "newremote" in remotes:
+ i = i + 1
+ newremote = f"newremote.{i:02d}"
+ git.git_operation("remote", "add", newremote, url)
+ return newremote
+
+def submodules_status(gitmodules, root_dir, toplevel=False):
+ testfails = 0
+ localmods = 0
+ needsupdate = 0
+ for name in gitmodules.sections():
+ path = gitmodules.get(name, "path")
+ tag = gitmodules.get(name, "fxtag")
+ url = gitmodules.get(name, "url")
+ required = gitmodules.get(name, "fxrequired")
+ level = required and "Toplevel" in required
+ if not path:
+ utils.fatal_error("No path found in .gitmodules for {}".format(name))
+ newpath = os.path.join(root_dir, path)
+ logger.debug("newpath is {}".format(newpath))
+ if not os.path.exists(os.path.join(newpath, ".git")):
+ rootgit = GitInterface(root_dir, logger)
+ # submodule commands use path, not name
+ url = url.replace("git@github.com:", "https://github.com/")
+ tags = rootgit.git_operation("ls-remote", "--tags", url)
+ result = rootgit.git_operation("submodule","status",newpath).split()
+ ahash = None
+ if result:
+ ahash = result[0][1:]
+ hhash = None
+ atag = None
+
+ needsupdate += 1
+ if not toplevel and level:
+ continue
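+            # Lines from ls-remote look like "<hash>\trefs/tags/<tag>"; the
+            # [10:] below strips the "refs/tags/" prefix, and a trailing ^{}
+            # marks a peeled (dereferenced) annotated tag.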
+ for htag in tags.split("\n"):
+ if htag.endswith('^{}'):
+ htag = htag[:-3]
+ if ahash and not atag and ahash in htag:
+ atag = (htag.split()[1])[10:]
+ if tag and not hhash and htag.endswith(tag):
+ hhash = htag.split()[0]
+ if hhash and atag:
+ break
+ optional = " (optional)" if required and "Optional" in required else ""
+ if tag and (ahash == hhash or atag == tag):
+ print(f"e {name:>20} not checked out, aligned at tag {tag}{optional}")
+ elif tag:
+ ahash = rootgit.git_operation(
+ "submodule", "status", "{}".format(path)
+ ).rstrip()
+ ahash = ahash[1 : len(tag) + 1]
+ if tag == ahash:
+ print(f"e {name:>20} not checked out, aligned at hash {ahash}{optional}")
+ else:
+ print(
+ f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}{optional}"
+ )
+ testfails += 1
+ else:
+ print(f"e {name:>20} has no fxtag defined in .gitmodules{optional}")
+ testfails += 1
+ else:
+ with utils.pushd(newpath):
+ git = GitInterface(newpath, logger)
+ atag = git.git_operation("describe", "--tags", "--always").rstrip()
+ ahash = git.git_operation("rev-list", "HEAD").partition("\n")[0]
+ rurl = git.git_operation("ls-remote","--get-url").rstrip()
+ if rurl != url:
+ remote = add_remote(git, url)
+ git.git_operation("fetch", remote)
+ if tag and atag == tag:
+ print(f" {name:>20} at tag {tag}")
+ elif tag and ahash[: len(tag)] == tag:
+ print(f" {name:>20} at hash {ahash}")
+ elif atag == ahash:
+ print(f" {name:>20} at hash {ahash}")
+ elif tag:
+ print(
+ f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}"
+ )
+ testfails += 1
+ needsupdate += 1
+ else:
+ print(
+ f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}"
+ )
+ testfails += 1
+
+ status = git.git_operation("status", "--ignore-submodules", "-uno")
+ if "nothing to commit" not in status:
+ localmods = localmods + 1
+ print("M" + textwrap.indent(status, " "))
+
+ return testfails, localmods, needsupdate
+
+
+def submodules_update(gitmodules, root_dir, requiredlist, force):
+ _, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+
+ if localmods and not force:
+ local_mods_output()
+ return
+ if needsupdate == 0:
+ return
+
+ for name in gitmodules.sections():
+ fxtag = gitmodules.get(name, "fxtag")
+ path = gitmodules.get(name, "path")
+ url = gitmodules.get(name, "url")
+ logger.info(
+ "name={} path={} url={} fxtag={} requiredlist={} ".format(
+ name, os.path.join(root_dir, path), url, fxtag, requiredlist
+ )
+ )
+
+ fxrequired = gitmodules.get(name, "fxrequired")
+ assert fxrequired in fxrequired_allowed_values()
+ rgit = GitInterface(root_dir, logger)
+ superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree")
+
+ fxsparse = gitmodules.get(name, "fxsparse")
+
+ if (
+ fxrequired
+ and (superroot and "Toplevel" in fxrequired)
+ or fxrequired not in requiredlist
+ ):
+ if "ToplevelOptional" == fxrequired:
+ print("Skipping optional component {}".format(name))
+ continue
+ if fxsparse:
+ logger.debug(
+ "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format(
+ root_dir, name, url, path, fxsparse, fxtag
+ )
+ )
+ submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag)
+ else:
+ logger.info(
+ "Calling submodule_checkout({},{},{},{})".format(
+ root_dir, name, path, url
+ )
+ )
+
+ single_submodule_checkout(
+ root_dir,
+ name,
+ path,
+ url=url,
+ tag=fxtag,
+ force=force,
+ optional=("AlwaysOptional" in requiredlist),
+ )
+
+ if os.path.exists(os.path.join(path, ".git")):
+ submoddir = os.path.join(root_dir, path)
+ with utils.pushd(submoddir):
+ git = GitInterface(submoddir, logger)
+ # first make sure the url is correct
+ upstream = git.git_operation("ls-remote", "--get-url").rstrip()
+ newremote = "origin"
+ if upstream != url:
+ add_remote(git, url)
+
+ tags = git.git_operation("tag", "-l")
+ if fxtag and fxtag not in tags:
+ git.git_operation("fetch", newremote, "--tags")
+ atag = git.git_operation("describe", "--tags", "--always").rstrip()
+ if fxtag and fxtag != atag:
+ try:
+ git.git_operation("checkout", fxtag)
+ print(f"{name:>20} updated to {fxtag}")
+ except Exception as error:
+ print(error)
+ elif not fxtag:
+ print(f"No fxtag found for submodule {name:>20}")
+ else:
+ print(f"{name:>20} up to date.")
+
+
+
+
+def local_mods_output():
+ text = """\
+ The submodules labeled with 'M' above are not in a clean state.
+ The following are options for how to proceed:
+ (1) Go into each submodule which is not in a clean state and issue a 'git status'
+ Either revert or commit your changes so that the submodule is in a clean state.
+ (2) use the --force option to git-fleximod
+ (3) you can name the particular submodules to update using the git-fleximod command line
+ (4) As a last resort you can remove the submodule (via 'rm -fr [directory]')
+ then rerun git-fleximod update.
+"""
+ print(text)
+
+
+# checkout is done by update if required, so this function may be deprecated
+def submodules_checkout(gitmodules, root_dir, requiredlist, force=False):
+ """
+ This function checks out all git submodules based on the provided parameters.
+
+ Parameters:
+ gitmodules (ConfigParser): The gitmodules configuration.
+ root_dir (str): The root directory for the git operation.
+ requiredlist (list): The list of required modules.
+ force (bool, optional): If set to True, forces the checkout operation. Defaults to False.
+
+ Returns:
+ None
+ """
+ # function implementation...
+ print("")
+ _, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+ if localmods and not force:
+ local_mods_output()
+ return
+ if not needsupdate:
+ return
+ for name in gitmodules.sections():
+ fxrequired = gitmodules.get(name, "fxrequired")
+ fxsparse = gitmodules.get(name, "fxsparse")
+ fxtag = gitmodules.get(name, "fxtag")
+ path = gitmodules.get(name, "path")
+ url = gitmodules.get(name, "url")
+ if fxrequired and fxrequired not in requiredlist:
+ if "Optional" in fxrequired:
+ print("Skipping optional component {}".format(name))
+ continue
+
+ if fxsparse:
+ logger.debug(
+ "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format(
+ root_dir, name, url, path, fxsparse, fxtag
+ )
+ )
+ submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag)
+ else:
+ logger.debug(
+ "Calling submodule_checkout({},{},{})".format(root_dir, name, path)
+ )
+ single_submodule_checkout(
+ root_dir,
+ name,
+ path,
+ url=url,
+ tag=fxtag,
+ force=force,
+ optional="AlwaysOptional" in requiredlist,
+ )
+
+
+def submodules_test(gitmodules, root_dir):
+ """
+ This function tests the git submodules based on the provided parameters.
+
+ It first checks that fxtags are present and in sync with submodule hashes.
+ Then it ensures that urls are consistent with fxurls (not forks and not ssh)
+ and that sparse checkout files exist.
+
+ Parameters:
+ gitmodules (ConfigParser): The gitmodules configuration.
+ root_dir (str): The root directory for the git operation.
+
+ Returns:
+ int: The number of test failures.
+ """
+ # First check that fxtags are present and in sync with submodule hashes
+ testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+ print("")
+    # Then make sure that urls are consistent with fxurls (not forks and not ssh)
+ # and that sparse checkout files exist
+ for name in gitmodules.sections():
+ url = gitmodules.get(name, "url")
+        fxurl = gitmodules.get(name, "fxDONOTUSEurl")
+ fxsparse = gitmodules.get(name, "fxsparse")
+ path = gitmodules.get(name, "path")
+ fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl
+ url = url[:-4] if url.endswith(".git") else url
+ if not fxurl or url.lower() != fxurl.lower():
+ print(f"{name:>20} url {url} not in sync with required {fxurl}")
+ testfails += 1
+ if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)):
+ print(f"{name:>20} sparse checkout file {fxsparse} not found")
+ testfails += 1
+ return testfails + localmods + needsupdate
+
+
+def main():
+ (
+ root_dir,
+ file_name,
+ fxrequired,
+ includelist,
+ excludelist,
+ force,
+ action,
+ ) = commandline_arguments()
+ # Get a logger for the package
+ global logger
+ logger = logging.getLogger(__name__)
+
+ logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name))
+
+ if not root_dir or not os.path.isfile(os.path.join(root_dir, file_name)):
+ if root_dir:
+ file_path = utils.find_upwards(root_dir, file_name)
+
+ if root_dir is None or file_path is None:
+ root_dir = "."
+ utils.fatal_error(
+ "No {} found in {} or any of it's parents".format(file_name, root_dir)
+ )
+
+ root_dir = os.path.dirname(file_path)
+ logger.info(
+ "root_dir is {} includelist={} excludelist={}".format(
+ root_dir, includelist, excludelist
+ )
+ )
+ gitmodules = GitModules(
+ logger,
+ confpath=root_dir,
+ conffile=file_name,
+ includelist=includelist,
+ excludelist=excludelist,
+ )
+ if not gitmodules.sections():
+ sys.exit("No submodule components found")
+ retval = 0
+ if action == "update":
+ submodules_update(gitmodules, root_dir, fxrequired, force)
+ elif action == "status":
+ tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True)
+ if tfails + lmods + updates > 0:
+ print(
+ f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n"
+ )
+ if lmods > 0:
+ local_mods_output()
+ elif action == "test":
+ retval = submodules_test(gitmodules, root_dir)
+ else:
+ utils.fatal_error(f"unrecognized action request {action}")
+ return retval
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/.lib/git-fleximod/git_fleximod/gitinterface.py b/.lib/git-fleximod/git_fleximod/gitinterface.py
new file mode 100644
index 0000000000..93ae38ecde
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/gitinterface.py
@@ -0,0 +1,79 @@
+import os
+import sys
+from . import utils
+from pathlib import Path
+
+class GitInterface:
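+    """Thin wrapper around git operations for a single repository.
+
+    Uses the GitPython module when it is importable, and falls back to
+    shelling out to the git command line otherwise.
+    """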
+ def __init__(self, repo_path, logger):
+ logger.debug("Initialize GitInterface for {}".format(repo_path))
+ if isinstance(repo_path, str):
+ self.repo_path = Path(repo_path).resolve()
+ elif isinstance(repo_path, Path):
+ self.repo_path = repo_path.resolve()
+ else:
+ raise TypeError("repo_path must be a str or Path object")
+ self.logger = logger
+ try:
+ import git
+
+ self._use_module = True
+ try:
+ self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo
+ except git.exc.InvalidGitRepositoryError:
+ self.git = git
+ self._init_git_repo()
+ msg = "Using GitPython interface to git"
+ except ImportError:
+ self._use_module = False
+ if not (self.repo_path / ".git").exists():
+ self._init_git_repo()
+ msg = "Using shell interface to git"
+ self.logger.info(msg)
+
+ def _git_command(self, operation, *args):
+ self.logger.info(operation)
+ if self._use_module and operation != "submodule":
+ try:
+ return getattr(self.repo.git, operation)(*args)
+ except Exception as e:
+ sys.exit(e)
+ else:
+ return ["git", "-C", str(self.repo_path), operation] + list(args)
+
+ def _init_git_repo(self):
+ if self._use_module:
+ self.repo = self.git.Repo.init(str(self.repo_path))
+ else:
+ command = ("git", "-C", str(self.repo_path), "init")
+ utils.execute_subprocess(command)
+
+ # pylint: disable=unused-argument
+ def git_operation(self, operation, *args, **kwargs):
+ command = self._git_command(operation, *args)
+ self.logger.info(command)
+ if isinstance(command, list):
+ try:
+ return utils.execute_subprocess(command, output_to_caller=True)
+ except Exception as e:
+ sys.exit(e)
+ else:
+ return command
+
+ def config_get_value(self, section, name):
+ if self._use_module:
+ config = self.repo.config_reader()
+ return config.get_value(section, name)
+ else:
+ cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}")
+ output = utils.execute_subprocess(cmd, output_to_caller=True)
+ return output.strip()
+
+ def config_set_value(self, section, name, value):
+ if self._use_module:
+ with self.repo.config_writer() as writer:
+ writer.set_value(section, name, value)
+ writer.release() # Ensure changes are saved
+ else:
+ cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value)
+ self.logger.info(cmd)
+ utils.execute_subprocess(cmd, output_to_caller=True)
diff --git a/.lib/git-fleximod/git_fleximod/gitmodules.py b/.lib/git-fleximod/git_fleximod/gitmodules.py
new file mode 100644
index 0000000000..7e4e05394a
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/gitmodules.py
@@ -0,0 +1,97 @@
+import shutil
+from pathlib import Path
+from configparser import RawConfigParser, ConfigParser, NoOptionError
+from .lstripreader import LstripReader
+
+
+class GitModules(RawConfigParser):
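+    """RawConfigParser specialized for .gitmodules files.
+
+    Sections are addressed by bare submodule name; the surrounding
+    'submodule "..."' wrapper is added and stripped internally.
+    """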
+ def __init__(
+ self,
+ logger,
+ confpath=Path.cwd(),
+ conffile=".gitmodules",
+ includelist=None,
+ excludelist=None,
+ ):
+ """
+ confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory).
+ conffile: Name of the configuration file (defaults to .gitmodules).
+ includelist: Optional list of submodules to include.
+ excludelist: Optional list of submodules to exclude.
+ """
+ self.logger = logger
+ self.logger.debug(
+ "Creating a GitModules object {} {} {} {}".format(
+ confpath, conffile, includelist, excludelist
+ )
+ )
+ super().__init__()
+ self.conf_file = (Path(confpath) / Path(conffile))
+ if self.conf_file.exists():
+ self.read_file(LstripReader(str(self.conf_file)), source=conffile)
+ self.includelist = includelist
+ self.excludelist = excludelist
+ self.isdirty = False
+
+ def reload(self):
+ self.clear()
+ if self.conf_file.exists():
+ self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file)
+
+
+ def set(self, name, option, value):
+ """
+ Sets a configuration value for a specific submodule:
+ Ensures the appropriate section exists for the submodule.
+ Calls the parent class's set method to store the value.
+ """
+ self.isdirty = True
+ self.logger.debug("set called {} {} {}".format(name, option, value))
+ section = f'submodule "{name}"'
+ if not self.has_section(section):
+ self.add_section(section)
+ super().set(section, option, str(value))
+
+ # pylint: disable=redefined-builtin, arguments-differ
+ def get(self, name, option, raw=False, vars=None, fallback=None):
+ """
+ Retrieves a configuration value for a specific submodule:
+ Uses the parent class's get method to access the value.
+ Handles potential errors if the section or option doesn't exist.
+ """
+ self.logger.debug("git get called {} {}".format(name, option))
+ section = f'submodule "{name}"'
+ try:
+ return ConfigParser.get(
+ self, section, option, raw=raw, vars=vars, fallback=fallback
+ )
+        except NoOptionError:
+ return None
+
+ def save(self):
+ if self.isdirty:
+ self.logger.info("Writing {}".format(self.conf_file))
+ with open(self.conf_file, "w") as fd:
+ self.write(fd)
+ self.isdirty = False
+
+ def __del__(self):
+ self.save()
+
+ def sections(self):
+ """Strip the submodule part out of section and just use the name"""
+ self.logger.debug("calling GitModules sections iterator")
+ names = []
+ for section in ConfigParser.sections(self):
+ name = section[11:-1]
+ if self.includelist and name not in self.includelist:
+ continue
+ if self.excludelist and name in self.excludelist:
+ continue
+ names.append(name)
+ return names
+
+ def items(self, name, raw=False, vars=None):
+ self.logger.debug("calling GitModules items for {}".format(name))
+ section = f'submodule "{name}"'
+        return ConfigParser.items(self, section, raw=raw, vars=vars)
diff --git a/.lib/git-fleximod/git_fleximod/lstripreader.py b/.lib/git-fleximod/git_fleximod/lstripreader.py
new file mode 100644
index 0000000000..01d5580ee8
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/lstripreader.py
@@ -0,0 +1,43 @@
+class LstripReader(object):
+ "LstripReader formats .gitmodules files to be acceptable for configparser"
+
+ def __init__(self, filename):
+ with open(filename, "r") as infile:
+ lines = infile.readlines()
+ self._lines = list()
+ self._num_lines = len(lines)
+ self._index = 0
+ for line in lines:
+ self._lines.append(line.lstrip())
+
+ def readlines(self):
+ """Return all the lines from this object's file"""
+ return self._lines
+
+ def readline(self, size=-1):
+ """Format and return the next line or raise StopIteration"""
+ try:
+ line = self.next()
+ except StopIteration:
+ line = ""
+
+ if (size > 0) and (len(line) < size):
+ return line[0:size]
+
+ return line
+
+ def __iter__(self):
+ """Begin an iteration"""
+ self._index = 0
+ return self
+
+ def next(self):
+ """Return the next line or raise StopIteration"""
+ if self._index >= self._num_lines:
+ raise StopIteration
+
+ self._index = self._index + 1
+ return self._lines[self._index - 1]
+
+ def __next__(self):
+ return self.next()
diff --git a/.lib/git-fleximod/git_fleximod/metoflexi.py b/.lib/git-fleximod/git_fleximod/metoflexi.py
new file mode 100755
index 0000000000..cc347db2dd
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/metoflexi.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+from configparser import ConfigParser
+import sys
+import shutil
+from pathlib import Path
+import argparse
+import logging
+from git_fleximod.gitinterface import GitInterface
+from git_fleximod.gitmodules import GitModules
+from git_fleximod import utils
+
+logger = None
+
+def find_root_dir(filename=".git"):
+ d = Path.cwd()
+ root = Path(d.root)
+ while d != root:
+ attempt = d / filename
+ if attempt.is_dir():
+ return d
+ d = d.parent
+ return None
+
+
+def get_parser():
+ description = """
+    %(prog)s manages checking out groups of git submodules with additional support for Earth System Models
+ """
+ parser = argparse.ArgumentParser(
+ description=description, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ parser.add_argument('-e', '--externals', nargs='?',
+ default='Externals.cfg',
+ help='The externals description filename. '
+ 'Default: %(default)s.')
+
+ parser.add_argument(
+ "-C",
+ "--path",
+ default=find_root_dir(),
+ help="Toplevel repository directory. Defaults to top git directory relative to current.",
+ )
+
+ parser.add_argument(
+ "-g",
+ "--gitmodules",
+ nargs="?",
+ default=".gitmodules",
+ help="The submodule description filename. " "Default: %(default)s.",
+ )
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="count",
+ default=0,
+ help="Output additional information to "
+ "the screen and log file. This flag can be "
+ "used up to two times, increasing the "
+ "verbosity level each time.",
+ )
+ parser.add_argument(
+ "-d",
+ "--debug",
+ action="store_true",
+ default=False,
+ help="DEVELOPER: output additional debugging "
+ "information to the screen and log file.",
+ )
+
+ return parser
+
+def commandline_arguments(args=None):
+ parser = get_parser()
+
+ options = parser.parse_args(args)
+ handlers = [logging.StreamHandler()]
+
+ if options.debug:
+ try:
+ open("fleximod.log", "w")
+ except PermissionError:
+ sys.exit("ABORT: Could not write file fleximod.log")
+ level = logging.DEBUG
+ handlers.append(logging.FileHandler("fleximod.log"))
+ elif options.verbose:
+ level = logging.INFO
+ else:
+ level = logging.WARNING
+ # Configure the root logger
+ logging.basicConfig(
+ level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers
+ )
+
+ return(
+ options.path,
+ options.gitmodules,
+ options.externals
+ )
+
+class ExternalRepoTranslator:
+ """
+ Translates external repositories configured in an INI-style externals file.
+ """
+
+ def __init__(self, rootpath, gitmodules, externals):
+ self.rootpath = rootpath
+ if gitmodules:
+ self.gitmodules = GitModules(logger, confpath=rootpath)
+ self.externals = (rootpath / Path(externals)).resolve()
+ print(f"Translating {self.externals}")
+ self.git = GitInterface(rootpath, logger)
+
+# def __del__(self):
+# if (self.rootpath / "save.gitignore"):
+
+
+ def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol):
+ """
+ Translates a single repository based on configuration details.
+
+ Args:
+ section (str): The .gitmodules section name for this repository.
+ tag (str): The tag to use for the external repository.
+ url (str): The URL of the external repository.
+ path (str): The relative path within the main repository for the external repository.
+ efile (str): A nested externals description file to translate, if any.
+ hash_ (str): The commit hash to checkout (if applicable).
+ sparse (str): Path to a sparse-checkout file (if applicable).
+ protocol (str): The protocol to use (e.g., 'git', 'externals_only').
+ """
+ assert protocol != "svn", "SVN protocol is not currently supported"
+ print(f"Translating repository {section}")
+ if efile:
+ file_path = Path(path) / Path(efile)
+ newroot = (self.rootpath / file_path).parent.resolve()
+ if not newroot.exists():
+ newroot.mkdir(parents=True)
+ logger.info("Newroot is {}".format(newroot))
+ newt = ExternalRepoTranslator(newroot, ".gitmodules", efile)
+ newt.translate_repo()
+ if protocol == "externals_only":
+ if tag:
+ self.gitmodules.set(section, "fxtag", tag)
+ if hash_:
+ self.gitmodules.set(section, "fxtag", hash_)
+
+ self.gitmodules.set(section, "fxDONOTUSEurl", url)
+ if sparse:
+ self.gitmodules.set(section, "fxsparse", sparse)
+ self.gitmodules.set(section, "fxrequired", "ToplevelRequired")
+ else:
+ newpath = (self.rootpath / Path(path))
+ if newpath.exists():
+ shutil.rmtree(newpath)
+ logger.info("Creating directory {}".format(newpath))
+ newpath.mkdir(parents=True)
+ if tag:
+ logger.info("cloning {}".format(section))
+ try:
+ self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path)
+ except Exception:
+ self.git.git_operation("clone", url, path)
+ with utils.pushd(newpath):
+ ngit = GitInterface(newpath, logger)
+ ngit.git_operation("checkout", tag)
+ elif hash_:
+ self.git.git_operation("clone", url, path)
+ git = GitInterface(newpath, logger)
+ git.git_operation("fetch", "origin")
+ git.git_operation("checkout", hash_)
+ if sparse:
+ print("setting as sparse submodule {}".format(section))
+ sparsefile = (newpath / Path(sparse))
+ newfile = (newpath / ".git" / "info" / "sparse-checkout")
+ print(f"sparsefile {sparsefile} newfile {newfile}")
+ shutil.copy(sparsefile, newfile)
+
+ logger.info("adding submodule {}".format(section))
+ self.gitmodules.save()
+ self.git.git_operation("submodule", "add", "-f", "--name", section, url, path)
+ self.git.git_operation("submodule","absorbgitdirs")
+ self.gitmodules.reload()
+ if tag:
+ self.gitmodules.set(section, "fxtag", tag)
+ if hash_:
+ self.gitmodules.set(section, "fxtag", hash_)
+
+ self.gitmodules.set(section, "fxDONOTUSEurl", url)
+ if sparse:
+ self.gitmodules.set(section, "fxsparse", sparse)
+ self.gitmodules.set(section, "fxrequired", "ToplevelRequired")
+
+
+ def translate_repo(self):
+ """
+ Translates external repositories defined within an external file.
+
+ Reads the externals file at self.externals and calls
+ translate_single_repo for each repository section it defines.
+ """
+ econfig = ConfigParser()
+ econfig.read((self.rootpath / Path(self.externals)))
+
+ for section in econfig.sections():
+ if section == "externals_description":
+ logger.info("skipping section {}".format(section))
+ continue
+ logger.info("Translating section {}".format(section))
+ tag = econfig.get(section, "tag", raw=False, fallback=None)
+ url = econfig.get(section, "repo_url", raw=False, fallback=None)
+ path = econfig.get(section, "local_path", raw=False, fallback=None)
+ efile = econfig.get(section, "externals", raw=False, fallback=None)
+ hash_ = econfig.get(section, "hash", raw=False, fallback=None)
+ sparse = econfig.get(section, "sparse", raw=False, fallback=None)
+ protocol = econfig.get(section, "protocol", raw=False, fallback=None)
+
+ self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol)
+
+
+
+def _main():
+ rootpath, gitmodules, externals = commandline_arguments()
+ global logger
+ logger = logging.getLogger(__name__)
+ with utils.pushd(rootpath):
+ t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals)
+ logger.info("Translating {}".format(rootpath))
+ t.translate_repo()
+
+
+if __name__ == "__main__":
+ sys.exit(_main())
diff --git a/manage_externals/manic/utils.py b/.lib/git-fleximod/git_fleximod/utils.py
similarity index 64%
rename from manage_externals/manic/utils.py
rename to .lib/git-fleximod/git_fleximod/utils.py
index 9c63ffe65e..1a2d5ccf2f 100644
--- a/manage_externals/manic/utils.py
+++ b/.lib/git-fleximod/git_fleximod/utils.py
@@ -4,23 +4,31 @@
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
import logging
import os
import subprocess
import sys
from threading import Timer
+from pathlib import Path
-from .global_constants import LOCAL_PATH_INDICATOR
-
+LOCAL_PATH_INDICATOR = "."
# ---------------------------------------------------------------------
#
-# screen and logging output and functions to massage text for output
+# functions to massage text for output and other useful utilities
#
# ---------------------------------------------------------------------
+from contextlib import contextmanager
+
+
+@contextmanager
+def pushd(new_dir):
+ """context for chdir. usage: with pushd(new_dir)"""
+ previous_dir = os.getcwd()
+ os.chdir(new_dir)
+ try:
+ yield
+ finally:
+ os.chdir(previous_dir)
def log_process_output(output):
@@ -30,7 +38,7 @@ def log_process_output(output):
line. This makes it hard to filter with grep.
"""
- output = output.split('\n')
+ output = output.split("\n")
for line in output:
logging.debug(line)
@@ -48,6 +56,18 @@ def printlog(msg, **kwargs):
sys.stdout.flush()
+def find_upwards(root_dir, filename):
+ """Find a file in root dir or any of it's parents"""
+ d = Path(root_dir)
+ root = Path(d.root)
+ while d != root:
+ attempt = d / filename
+ if attempt.exists():
+ return attempt
+ d = d.parent
+ return None
+
+
def last_n_lines(the_string, n_lines, truncation_message=None):
"""Returns the last n lines of the given string
@@ -68,9 +88,9 @@ def last_n_lines(the_string, n_lines, truncation_message=None):
return_val = the_string
else:
lines_subset = lines[-n_lines:]
- str_truncated = ''.join(lines_subset)
+ str_truncated = "".join(lines_subset)
if truncation_message:
- str_truncated = truncation_message + '\n' + str_truncated
+ str_truncated = truncation_message + "\n" + str_truncated
return_val = str_truncated
return return_val
@@ -90,9 +110,10 @@ def indent_string(the_string, indent_level):
"""
lines = the_string.splitlines(True)
- padding = ' ' * indent_level
+ padding = " " * indent_level
lines_indented = [padding + line for line in lines]
- return ''.join(lines_indented)
+ return "".join(lines_indented)
+
# ---------------------------------------------------------------------
#
@@ -121,24 +142,26 @@ def str_to_bool(bool_str):
"""
value = None
str_lower = bool_str.lower()
- if str_lower in ('true', 't'):
+ if str_lower in ("true", "t"):
value = True
- elif str_lower in ('false', 'f'):
+ elif str_lower in ("false", "f"):
value = False
if value is None:
- msg = ('ERROR: invalid boolean string value "{0}". '
- 'Must be "true" or "false"'.format(bool_str))
+ msg = (
+ 'ERROR: invalid boolean string value "{0}". '
+ 'Must be "true" or "false"'.format(bool_str)
+ )
fatal_error(msg)
return value
-REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@']
+REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"]
def is_remote_url(url):
"""check if the user provided a local file path instead of a
- remote. If so, it must be expanded to an absolute
- path.
+ remote. If so, it must be expanded to an absolute
+ path.
"""
remote_url = False
@@ -150,7 +173,7 @@ def is_remote_url(url):
def split_remote_url(url):
"""check if the user provided a local file path or a
- remote. If remote, try to strip off protocol info.
+ remote. If remote, try to strip off protocol info.
"""
remote_url = is_remote_url(url)
@@ -158,13 +181,13 @@ def split_remote_url(url):
return url
for prefix in REMOTE_PREFIXES:
- url = url.replace(prefix, '')
+ url = url.replace(prefix, "")
- if '@' in url:
- url = url.split('@')[1]
+ if "@" in url:
+ url = url.split("@")[1]
- if ':' in url:
- url = url.split(':')[1]
+ if ":" in url:
+ url = url.split(":")[1]
return url
@@ -186,10 +209,12 @@ def expand_local_url(url, field):
url = os.path.expandvars(url)
url = os.path.expanduser(url)
if not os.path.isabs(url):
- msg = ('WARNING: Externals description for "{0}" contains a '
- 'url that is not remote and does not expand to an '
- 'absolute path. Version control operations may '
- 'fail.\n\nurl={1}'.format(field, url))
+ msg = (
+ 'WARNING: Externals description for "{0}" contains a '
+ "url that is not remote and does not expand to an "
+ "absolute path. Version control operations may "
+ "fail.\n\nurl={1}".format(field, url)
+ )
printlog(msg)
else:
url = os.path.normpath(url)
@@ -208,27 +233,30 @@ def expand_local_url(url, field):
def _hanging_msg(working_directory, command):
- print("""
+ print(
+ """
Command '{command}'
from directory {working_directory}
has taken {hanging_sec} seconds. It may be hanging.
The command will continue to run, but you may want to abort
-manage_externals with ^C and investigate. A possible cause of hangs is
-when svn or git require authentication to access a private
-repository. On some systems, svn and git requests for authentication
-information will not be displayed to the user. In this case, the program
-will appear to hang. Ensure you can run svn and git manually and access
-all repositories without entering your authentication information.
-
-""".format(command=command,
- working_directory=working_directory,
- hanging_sec=_HANGING_SEC))
-
-
-def execute_subprocess(commands, status_to_caller=False,
- output_to_caller=False):
+git-fleximod with ^C and investigate. A possible cause of hangs is git
+requires authentication to access a private repository. On some
+systems, git requests for authentication information will not
+be displayed to the user. In this case, the program will appear to
+hang. Ensure you can run git manually and access all
+repositories without entering your authentication information.
+
+""".format(
+ command=command,
+ working_directory=working_directory,
+ hanging_sec=_HANGING_SEC,
+ )
+ )
+
+
+def execute_subprocess(commands, status_to_caller=False, output_to_caller=False):
"""Wrapper around subprocess.check_output to handle common
exceptions.
@@ -242,32 +270,35 @@ def execute_subprocess(commands, status_to_caller=False,
"""
cwd = os.getcwd()
- msg = 'In directory: {0}\nexecute_subprocess running command:'.format(cwd)
+ msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd)
logging.info(msg)
- commands_str = ' '.join(commands)
+ commands_str = " ".join(str(element) for element in commands)
logging.info(commands_str)
return_to_caller = status_to_caller or output_to_caller
status = -1
- output = ''
- hanging_timer = Timer(_HANGING_SEC, _hanging_msg,
- kwargs={"working_directory": cwd,
- "command": commands_str})
+ output = ""
+ hanging_timer = Timer(
+ _HANGING_SEC,
+ _hanging_msg,
+ kwargs={"working_directory": cwd, "command": commands_str},
+ )
hanging_timer.start()
try:
- output = subprocess.check_output(commands, stderr=subprocess.STDOUT,
- universal_newlines=True)
+ output = subprocess.check_output(
+ commands, stderr=subprocess.STDOUT, universal_newlines=True
+ )
log_process_output(output)
status = 0
except OSError as error:
msg = failed_command_msg(
- 'Command execution failed. Does the executable exist?',
- commands)
+ "Command execution failed. Does the executable exist?", commands
+ )
logging.error(error)
fatal_error(msg)
except ValueError as error:
msg = failed_command_msg(
- 'DEV_ERROR: Invalid arguments trying to run subprocess',
- commands)
+ "DEV_ERROR: Invalid arguments trying to run subprocess", commands
+ )
logging.error(error)
fatal_error(msg)
except subprocess.CalledProcessError as error:
@@ -277,10 +308,11 @@ def execute_subprocess(commands, status_to_caller=False,
# responsibility determine if an error occurred and handle it
# appropriately.
if not return_to_caller:
- msg_context = ('Process did not run successfully; '
- 'returned status {0}'.format(error.returncode))
- msg = failed_command_msg(msg_context, commands,
- output=error.output)
+ msg_context = (
+ "Process did not run successfully; "
+ "returned status {0}".format(error.returncode)
+ )
+ msg = failed_command_msg(msg_context, commands, output=error.output)
logging.error(error)
logging.error(msg)
log_process_output(error.output)
@@ -309,22 +341,25 @@ def failed_command_msg(msg_context, command, output=None):
"""
if output:
- output_truncated = last_n_lines(output, 20,
- truncation_message='[... Output truncated for brevity ...]')
- errmsg = ('Failed with output:\n' +
- indent_string(output_truncated, 4) +
- '\nERROR: ')
+ output_truncated = last_n_lines(
+ output, 20, truncation_message="[... Output truncated for brevity ...]"
+ )
+ errmsg = (
+ "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: "
+ )
else:
- errmsg = ''
+ errmsg = ""
- command_str = ' '.join(command)
+ command_str = " ".join(command)
errmsg += """In directory
{cwd}
{context}:
{command}
-""".format(cwd=os.getcwd(), context=msg_context, command=command_str)
+""".format(
+ cwd=os.getcwd(), context=msg_context, command=command_str
+ )
if output:
- errmsg += 'See above for output from failed command.\n'
+ errmsg += "See above for output from failed command.\n"
return errmsg
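The two helpers added above are small but load-bearing throughout the translator; a minimal usage sketch, assuming the package is importable from this tree:

```python
import os
from pathlib import Path
from git_fleximod.utils import find_upwards, pushd

# pushd() restores the starting directory even if the body raises.
start = os.getcwd()
with pushd(Path.home()):
    print("inside:", os.getcwd())
assert os.getcwd() == start

# find_upwards() checks root_dir and then each parent for the name,
# returning a Path on success or None once it reaches the root.
print(find_upwards(os.getcwd(), ".gitmodules"))
```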
diff --git a/.lib/git-fleximod/poetry.lock b/.lib/git-fleximod/poetry.lock
new file mode 100644
index 0000000000..b59ed3942c
--- /dev/null
+++ b/.lib/git-fleximod/poetry.lock
@@ -0,0 +1,693 @@
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+
+[[package]]
+name = "alabaster"
+version = "0.7.13"
+description = "A configurable sidebar-enabled Sphinx theme"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
+ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
+]
+
+[[package]]
+name = "babel"
+version = "2.14.0"
+description = "Internationalization utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"},
+ {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"},
+]
+
+[package.dependencies]
+pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+
+[[package]]
+name = "certifi"
+version = "2024.2.2"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
+ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "docutils"
+version = "0.19"
+description = "Docutils -- Python Documentation Utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"},
+ {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+ {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "fsspec"
+version = "2023.12.2"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"},
+ {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+devel = ["pytest", "pytest-cov"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+tqdm = ["tqdm"]
+
+[[package]]
+name = "gitdb"
+version = "4.0.11"
+description = "Git Object Database"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
+ {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
+]
+
+[package.dependencies]
+smmap = ">=3.0.1,<6"
+
+[[package]]
+name = "gitpython"
+version = "3.1.41"
+description = "GitPython is a Python library used to interact with Git repositories"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"},
+ {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"},
+]
+
+[package.dependencies]
+gitdb = ">=4.0.1,<5"
+
+[package.extras]
+test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"]
+
+[[package]]
+name = "idna"
+version = "3.6"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
+ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "7.0.1"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
+ {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.3"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+ {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+ {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pyfakefs"
+version = "5.3.5"
+description = "pyfakefs implements a fake file system that mocks the Python file system modules."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"},
+ {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.17.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
+]
+
+[package.extras]
+plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pytest"
+version = "8.0.0"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"},
+ {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.3.0,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytz"
+version = "2024.1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "smmap"
+version = "5.0.1"
+description = "A pure Python implementation of a sliding window memory map manager"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
+ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+optional = false
+python-versions = "*"
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "sphinx"
+version = "5.3.0"
+description = "Python documentation generator"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"},
+ {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"},
+]
+
+[package.dependencies]
+alabaster = ">=0.7,<0.8"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.14,<0.20"
+imagesize = ">=1.3"
+importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.12"
+requests = ">=2.5.0"
+snowballstemmer = ">=2.0"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = ">=2.0.0"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = ">=1.1.5"
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"]
+test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.4"
+description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"},
+ {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.2"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
+ {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.1"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"},
+ {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["html5lib", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[package.extras]
+test = ["flake8", "mypy", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.3"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
+ {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.5"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
+ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.0"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
+ {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "wheel"
+version = "0.42.0"
+description = "A built-package format for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"},
+ {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"},
+]
+
+[package.extras]
+test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
+
+[[package]]
+name = "zipp"
+version = "3.17.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
+ {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.8"
+content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d"
diff --git a/.lib/git-fleximod/pyproject.toml b/.lib/git-fleximod/pyproject.toml
new file mode 100644
index 0000000000..5b1332549c
--- /dev/null
+++ b/.lib/git-fleximod/pyproject.toml
@@ -0,0 +1,41 @@
+[tool.poetry]
+name = "git-fleximod"
+version = "0.7.8"
+description = "Extended support for git-submodule and git-sparse-checkout"
+authors = ["Jim Edwards "]
+maintainers = ["Jim Edwards "]
+license = "MIT"
+readme = "README.md"
+homepage = "https://github.com/jedwards4b/git-fleximod"
+keywords = ["git", "submodule", "sparse-checkout"]
+packages = [
+{ include = "git_fleximod"},
+{ include = "doc"},
+]
+
+[tool.poetry.scripts]
+git-fleximod = "git_fleximod.git_fleximod:main"
+me2flexi = "git_fleximod.metoflexi:_main"
+fsspec = "fsspec.fuse:main"
+
+[tool.poetry.dependencies]
+python = "^3.8"
+GitPython = "^3.1.0"
+sphinx = "^5.0.0"
+fsspec = "^2023.12.2"
+wheel = "^0.42.0"
+pytest = "^8.0.0"
+pyfakefs = "^5.3.5"
+
+[tool.poetry.urls]
+"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues"
+
+[tool.pytest.ini_options]
+markers = [
+ "skip_after_first: only run on first iteration"
+]
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
diff --git a/.lib/git-fleximod/tbump.toml b/.lib/git-fleximod/tbump.toml
new file mode 100644
index 0000000000..c4f7ac96ea
--- /dev/null
+++ b/.lib/git-fleximod/tbump.toml
@@ -0,0 +1,43 @@
+# Uncomment this if your project is hosted on GitHub:
+github_url = "https://github.com/jedwards4b/git-fleximod/"
+
+[version]
+current = "0.7.8"
+
+# Example of a semver regexp.
+# Make sure this matches current_version before
+# using tbump
+regex = '''
+ (?P<major>\d+)
+ \.
+ (?P<minor>\d+)
+ \.
+ (?P<patch>\d+)
+ '''
+
+[git]
+message_template = "Bump to {new_version}"
+tag_template = "v{new_version}"
+
+# For each file to patch, add a [[file]] config
+# section containing the path of the file, relative to the
+# tbump.toml location.
+[[file]]
+src = "git_fleximod/cli.py"
+
+[[file]]
+src = "pyproject.toml"
+
+# You can specify a list of commands to
+# run after the files have been patched
+# and before the git commit is made
+
+# [[before_commit]]
+# name = "check changelog"
+# cmd = "grep -q {new_version} Changelog.rst"
+
+# Or run some commands after the git tag and the branch
+# have been pushed:
+# [[after_push]]
+# name = "publish"
+# cmd = "./publish.sh"
diff --git a/.lib/git-fleximod/tests/__init__.py b/.lib/git-fleximod/tests/__init__.py
new file mode 100644
index 0000000000..4d4c66c78e
--- /dev/null
+++ b/.lib/git-fleximod/tests/__init__.py
@@ -0,0 +1,3 @@
+import sys, os
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src"))
diff --git a/.lib/git-fleximod/tests/conftest.py b/.lib/git-fleximod/tests/conftest.py
new file mode 100644
index 0000000000..65ee85d23d
--- /dev/null
+++ b/.lib/git-fleximod/tests/conftest.py
@@ -0,0 +1,138 @@
+import pytest
+from git_fleximod.gitinterface import GitInterface
+import os
+import subprocess
+import logging
+from pathlib import Path
+
+@pytest.fixture(scope='session')
+def logger():
+ logging.basicConfig(
+ level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()]
+ )
+ logger = logging.getLogger(__name__)
+ return logger
+
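+# Parameterization data: each dict describes one submodule flavor (required,
+# optional, pinned to a hash, or sparse) plus the "git fleximod status"
+# output expected at successive stages of the tests.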
+all_repos=[
+ {"subrepo_path": "modules/test",
+ "submodule_name": "test_submodule",
+ "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+ "status2" : "test_submodule at tag MPIserial_2.4.0",
+ "status3" : "test_submodule at tag MPIserial_2.4.0",
+ "status4" : "test_submodule at tag MPIserial_2.4.0",
+ "gitmodules_content" : """
+ [submodule "test_submodule"]
+ path = modules/test
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = MPIserial_2.4.0
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = ToplevelRequired
+"""},
+ {"subrepo_path": "modules/test_optional",
+ "submodule_name": "test_optional",
+ "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+ "status2" : "test_optional at tag MPIserial_2.4.0",
+ "status3" : "test_optional not checked out, out of sync at tag None, expected tag is MPIserial_2.4.0 (optional)",
+ "status4" : "test_optional at tag MPIserial_2.4.0",
+ "gitmodules_content": """
+ [submodule "test_optional"]
+ path = modules/test_optional
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = MPIserial_2.4.0
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = ToplevelOptional
+"""},
+ {"subrepo_path": "modules/test_alwaysoptional",
+ "submodule_name": "test_alwaysoptional",
+ "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c",
+ "status2" : "test_alwaysoptional at hash e5cf35c",
+ "status3" : "out of sync at tag None, expected tag is e5cf35c",
+ "status4" : "test_alwaysoptional at hash e5cf35c",
+ "gitmodules_content": """
+ [submodule "test_alwaysoptional"]
+ path = modules/test_alwaysoptional
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = e5cf35c
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = AlwaysOptional
+"""},
+ {"subrepo_path": "modules/test_sparse",
+ "submodule_name": "test_sparse",
+ "status1" : "test_sparse at tag MPIserial_2.5.0",
+ "status2" : "test_sparse at tag MPIserial_2.5.0",
+ "status3" : "test_sparse at tag MPIserial_2.5.0",
+ "status4" : "test_sparse at tag MPIserial_2.5.0",
+ "gitmodules_content": """
+ [submodule "test_sparse"]
+ path = modules/test_sparse
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = MPIserial_2.5.0
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = AlwaysRequired
+ fxsparse = ../.sparse_file_list
+"""},
+]
+@pytest.fixture(params=all_repos)
+def shared_repos(request):
+ return request.param
+
+@pytest.fixture
+def get_all_repos():
+ return all_repos
+
+def write_sparse_checkout_file(fp):
+ sparse_content = """m4
+"""
+ fp.write_text(sparse_content)
+
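+# Builds a scratch repository containing the parameterized submodule, commits
+# it, and returns a fresh clone of that repository for the test to use.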
+@pytest.fixture
+def test_repo(shared_repos, tmp_path, logger):
+ subrepo_path = shared_repos["subrepo_path"]
+ submodule_name = shared_repos["submodule_name"]
+ test_dir = tmp_path / "testrepo"
+ test_dir.mkdir()
+ str_path = str(test_dir)
+ gitp = GitInterface(str_path, logger)
+ assert test_dir.joinpath(".git").is_dir()
+ (test_dir / "modules").mkdir()
+ if "sparse" in submodule_name:
+ (test_dir / subrepo_path).mkdir()
+ # Add the sparse checkout file
+ write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list")
+ gitp.git_operation("add","modules/.sparse_file_list")
+ else:
+ gitp = GitInterface(str(test_dir), logger)
+ gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path)
+ assert test_dir.joinpath(".gitmodules").is_file()
+ gitp.git_operation("add",subrepo_path)
+ gitp.git_operation("commit","-a","-m","\"add submod\"")
+ test_dir2 = tmp_path / "testrepo2"
+ gitp.git_operation("clone",test_dir,test_dir2)
+ return test_dir2
+
+
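+# Checks out jedwards4b/fleximod-test2, which provides a nested hierarchy of
+# required, optional, and sparse submodules for the complex tests.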
+@pytest.fixture
+def complex_repo(tmp_path, logger):
+ test_dir = tmp_path / "testcomplex"
+ test_dir.mkdir()
+ str_path = str(test_dir)
+ gitp = GitInterface(str_path, logger)
+ gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2")
+ gitp.git_operation("fetch", "origin", "main")
+ gitp.git_operation("checkout", "main")
+ return test_dir
+
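+# Returns a helper that runs "git fleximod <args>" in the given directory,
+# capturing stdout/stderr as text so tests can assert on the output.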
+@pytest.fixture
+def git_fleximod():
+ def _run_fleximod(path, args, input=None):
+ cmd = ["git", "fleximod"] + args.split()
+ result = subprocess.run(cmd, cwd=path, input=input,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ text=True)
+ if result.returncode:
+ print(result.stdout)
+ print(result.stderr)
+ return result
+ return _run_fleximod
+
diff --git a/.lib/git-fleximod/tests/test_a_import.py b/.lib/git-fleximod/tests/test_a_import.py
new file mode 100644
index 0000000000..d5ca878de5
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_a_import.py
@@ -0,0 +1,8 @@
+# pylint: disable=unused-import
+from git_fleximod import cli
+from git_fleximod import utils
+from git_fleximod.gitinterface import GitInterface
+from git_fleximod.gitmodules import GitModules
+
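+# Smoke test: passes as long as the git_fleximod imports above succeed.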
+def test_import():
+ print("here")
diff --git a/.lib/git-fleximod/tests/test_b_update.py b/.lib/git-fleximod/tests/test_b_update.py
new file mode 100644
index 0000000000..159f1cfae0
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_b_update.py
@@ -0,0 +1,26 @@
+import pytest
+from pathlib import Path
+
+def test_basic_checkout(git_fleximod, test_repo, shared_repos):
+ # Prepare a simple .gitmodules
+ gm = shared_repos['gitmodules_content']
+ file_path = (test_repo / ".gitmodules")
+ repo_name = shared_repos["submodule_name"]
+ repo_path = shared_repos["subrepo_path"]
+
+ file_path.write_text(gm)
+
+ # Run the command
+ result = git_fleximod(test_repo, f"update {repo_name}")
+
+ # Assertions
+ assert result.returncode == 0
+ assert Path(test_repo / repo_path).exists() # Did the submodule directory get created?
+ if "sparse" in repo_name:
+ assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created?
+ assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created?
+
+ status = git_fleximod(test_repo, f"status {repo_name}")
+
+ assert shared_repos["status2"] in status.stdout
+
diff --git a/.lib/git-fleximod/tests/test_c_required.py b/.lib/git-fleximod/tests/test_c_required.py
new file mode 100644
index 0000000000..89ab8d294d
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_c_required.py
@@ -0,0 +1,30 @@
+import pytest
+from pathlib import Path
+
+def test_required(git_fleximod, test_repo, shared_repos):
+ file_path = (test_repo / ".gitmodules")
+ gm = shared_repos["gitmodules_content"]
+ repo_name = shared_repos["submodule_name"]
+ if file_path.exists():
+ with file_path.open("r") as f:
+ gitmodules_content = f.read()
+ # add the entry if it does not exist
+ if repo_name not in gitmodules_content:
+ file_path.write_text(gitmodules_content+gm)
+ # or if it is incomplete
+ elif gm not in gitmodules_content:
+ file_path.write_text(gm)
+ else:
+ file_path.write_text(gm)
+ result = git_fleximod(test_repo, "update")
+ assert result.returncode == 0
+ status = git_fleximod(test_repo, f"status {repo_name}")
+ assert shared_repos["status3"] in status.stdout
+ status = git_fleximod(test_repo, f"update --optional")
+ assert result.returncode == 0
+ status = git_fleximod(test_repo, f"status {repo_name}")
+ assert shared_repos["status4"] in status.stdout
+ status = git_fleximod(test_repo, f"update {repo_name}")
+ assert result.returncode == 0
+ status = git_fleximod(test_repo, f"status {repo_name}")
+ assert shared_repos["status4"] in status.stdout
diff --git a/.lib/git-fleximod/tests/test_d_complex.py b/.lib/git-fleximod/tests/test_d_complex.py
new file mode 100644
index 0000000000..edde7d816d
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_d_complex.py
@@ -0,0 +1,66 @@
+import pytest
+from pathlib import Path
+from git_fleximod.gitinterface import GitInterface
+
+def test_complex_checkout(git_fleximod, complex_repo, logger):
+ status = git_fleximod(complex_repo, "status")
+ assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
+ assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout)
+ assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout)
+ assert("Complex not checked out, aligned at tag testtag02" in status.stdout)
+ assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout)
+
+ # This should checkout and update test_submodule and complex_sub
+ result = git_fleximod(complex_repo, "update")
+ assert result.returncode == 0
+
+ status = git_fleximod(complex_repo, "status")
+ assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
+ assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+ assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+ assert("Complex at tag testtag02" in status.stdout)
+
+ # now check the complex_sub
+ root = (complex_repo / "modules" / "complex")
+ assert(not (root / "libraries" / "gptl" / ".git").exists())
+ assert(not (root / "libraries" / "mpi-serial" / ".git").exists())
+ assert((root / "modules" / "mpi-serial" / ".git").exists())
+ assert(not (root / "modules" / "mpi-serial2" / ".git").exists())
+ assert((root / "modules" / "mpi-sparse" / ".git").exists())
+ assert((root / "modules" / "mpi-sparse" / "m4").exists())
+ assert(not (root / "modules" / "mpi-sparse" / "README").exists())
+
+ # update a single optional submodule
+
+ result = git_fleximod(complex_repo, "update ToplevelOptional")
+ assert result.returncode == 0
+
+ status = git_fleximod(complex_repo, "status")
+ assert("ToplevelOptional at tag v5.3.2" in status.stdout)
+ assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+ assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+ assert("Complex at tag testtag02" in status.stdout)
+ assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout)
+
+ # Finally update optional
+ result = git_fleximod(complex_repo, "update --optional")
+ assert result.returncode == 0
+
+ status = git_fleximod(complex_repo, "status")
+ assert("ToplevelOptional at tag v5.3.2" in status.stdout)
+ assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+ assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+ assert("Complex at tag testtag02" in status.stdout)
+ assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout)
+
+ # now check the complex_sub
+ root = (complex_repo / "modules" / "complex" )
+ assert(not (root / "libraries" / "gptl" / ".git").exists())
+ assert(not (root / "libraries" / "mpi-serial" / ".git").exists())
+ assert((root / "modules" / "mpi-serial" / ".git").exists())
+ assert((root / "modules" / "mpi-serial2" / ".git").exists())
+ assert((root / "modules" / "mpi-sparse" / ".git").exists())
+ assert((root / "modules" / "mpi-sparse" / "m4").exists())
+ assert(not (root / "modules" / "mpi-sparse" / "README").exists())
+
+
diff --git a/Externals.cfg b/Externals.cfg
deleted file mode 100644
index 185f412cab..0000000000
--- a/Externals.cfg
+++ /dev/null
@@ -1,101 +0,0 @@
-[clm]
-local_path = .
-protocol = externals_only
-externals = Externals_CLM.cfg
-required = True
-
-[cism]
-local_path = components/cism
-protocol = git
-repo_url = https://github.com/ESCOMP/CISM-wrapper
-tag = cismwrap_2_1_99
-externals = Externals_CISM.cfg
-required = True
-
-[rtm]
-local_path = components/rtm
-protocol = git
-repo_url = https://github.com/ESCOMP/RTM
-tag = rtm1_0_79
-required = True
-
-[mosart]
-local_path = components/mosart
-protocol = git
-repo_url = https://github.com/ESCOMP/MOSART
-tag = mosart1_0_49
-required = True
-
-[mizuRoute]
-local_path = components/mizuRoute
-protocol = git
-repo_url = https://github.com/nmizukami/mizuRoute
-hash = 34723c2
-required = True
-
-[ccs_config]
-tag = ccs_config_cesm0.0.92
-protocol = git
-repo_url = https://github.com/ESMCI/ccs_config_cesm.git
-local_path = ccs_config
-required = True
-
-[cime]
-local_path = cime
-protocol = git
-repo_url = https://github.com/ESMCI/cime
-tag = cime6.0.217_httpsbranch03
-required = True
-
-[cmeps]
-tag = cmeps0.14.50
-protocol = git
-repo_url = https://github.com/ESCOMP/CMEPS.git
-local_path = components/cmeps
-required = True
-
-[cdeps]
-tag = cdeps1.0.28
-protocol = git
-repo_url = https://github.com/ESCOMP/CDEPS.git
-local_path = components/cdeps
-externals = Externals_CDEPS.cfg
-required = True
-
-[cpl7]
-tag = cpl77.0.7
-protocol = git
-repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps
-local_path = components/cpl7
-required = True
-
-[share]
-tag = share1.0.18
-protocol = git
-repo_url = https://github.com/ESCOMP/CESM_share
-local_path = share
-required = True
-
-[mct]
-tag = MCT_2.11.0
-protocol = git
-repo_url = https://github.com/MCSclimate/MCT
-local_path = libraries/mct
-required = True
-
-[parallelio]
-tag = pio2_6_2
-protocol = git
-repo_url = https://github.com/NCAR/ParallelIO
-local_path = libraries/parallelio
-required = True
-
-[doc-builder]
-local_path = doc/doc-builder
-protocol = git
-repo_url = https://github.com/ESMCI/doc-builder
-tag = v1.0.8
-required = False
-
-[externals_description]
-schema_version = 1.0.0
diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg
deleted file mode 100644
index 378992c777..0000000000
--- a/Externals_CLM.cfg
+++ /dev/null
@@ -1,9 +0,0 @@
-[fates]
-local_path = src/fates
-protocol = git
-repo_url = https://github.com/NGEET/fates
-tag = sci.1.72.2_api.34.0.0
-required = True
-
-[externals_description]
-schema_version = 1.0.0
diff --git a/README b/README
index 18cc2b1458..8b4e15e557 100644
--- a/README
+++ b/README
@@ -47,17 +47,16 @@ doc --------------- Documentation of CTSM.
bld --------------- build-namelist scripts for CTSM.
src --------------- CTSM Source code.
lilac ------------- Lightweight Infrastructure for Land-Atmosphere Coupling (for coupling to a host atmosphere model)
-test -------------- CTSM Testing scripts for CTSM offline tools (deprecated)
tools ------------- CTSM Offline tools to prepare input datasets and process output.
cime_config ------- Configuration files of cime for compsets and CTSM settings
-manage_externals -- Script to manage the external source directories (deprecated)
+bin/git-fleximod -- Script to manage the needed sub-component source directories (handled with git submodules)
py_env_create ----- Script to setup the python environment for CTSM python tools using conda
python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scirpts
Directory structure only for a CTSM checkout:
components -------- Other active sub-components needed for CTSM to run (river routing and land-ice models)
-libraries --------- CESM libraries: MCT (Model Coupling Toolkit) and PIO (deprecated)
+libraries --------- CESM libraries: PIO (deprecated)
share ------------- CESM shared code
ccs_config -------- CIME configure files (for grids, compsets, and machines) for CESM
@@ -68,14 +67,13 @@ components/cdeps -------------------- CESM top level data model shared code (for
components/cism --------------------- CESM Community land Ice Sheet Model.
components/mosart ------------------- Model for Scale Adaptive River Transport
components/rtm ---------------------- CESM River Transport Model.
-components/cpl7 --------------------- CESM top level driver for MCT driver (deprecated will be removed)
Top level documentation ($CTSMROOT):
README ------------------- This file
README.md ---------------- File that displays on github under https::/github.com/ESCOMP/CTSM.git
README.rst --------------- File that displays under the project in github
-README_EXTERNALS.rst ----- Information on how to work with manage_externals for CTSM (deprecated)
+README_GITFLEXIMOD.rst --- Information on how to work with git-fleximod for CTSM
CODE_OF_CONDUCT.md ------- Code of Conduct for how to work with each other on the CTSM project
Copyright ---------------- CESM Copyright file
doc/UpdateChangeLog.pl --- Script to add documentation on a tag to the
@@ -99,9 +97,6 @@ bld/namelist_files/namelist_defaults_ctsm.xml ----- Default values
Important files in main directories (under $CTSMROOT):
=============================================================================================
-Externals.cfg --------------- File for management of the main high level external (deprecated)
-Externals_CLM.cfg ----------- File for management of the CTSM specific externals (i.e. FATES)
-
run_sys_tests --------------- Python script to send the standard CTSM testing off (submits
the create_test test suite for several different compilers on the
machines we do standard CTSM testing on).
@@ -154,7 +149,7 @@ Source code directory structure:
src/biogeochem ---- Biogeochemisty
src/main ---------- Main control and high level code
-src/cpl ----------- Land model high level caps for NUOPC driver (and MCT and LILAC)
+src/cpl ----------- Land model high level caps for NUOPC driver (and LILAC)
src/biogeophys ---- Biogeophysics (Hydrology)
src/dyn_subgrid --- Dynamic land unit change
src/init_interp --- Online interpolation
diff --git a/README.NUOPC_driver.md b/README.NUOPC_driver.md
index ba0b70c2c0..6caf63a9bd 100644
--- a/README.NUOPC_driver.md
+++ b/README.NUOPC_driver.md
@@ -40,12 +40,3 @@ ESMF_PROFILING_LEVEL --- Verbosity level for ESMF profiling
nuopc.runseq is a text file that determines how the driver operates. You can change the operation
by having an updated copy in your case directory.
-
-## What if I want to use the MCT driver?
-
-The MCT driver is now deprecated, and will be removed. So at this point we don't
-suggest using it anymore.
-
-For more notes see:
-
-https://docs.google.com/presentation/d/1yjiKSEV53JDAJbYxhpY2T9GTxlWFzQAn
diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst
deleted file mode 100644
index ed7a068991..0000000000
--- a/README_EXTERNALS.rst
+++ /dev/null
@@ -1,132 +0,0 @@
-Obtaining the full model code and associated scripting infrastructure
-=====================================================================
-
-[!CAUTION]
-This is deprecated and will be replaced with git submodules. See
-https://github.com/ESCOMP/CTSM/pull/2443
-
-
-CTSM is released via GitHub. You will need some familiarity with git in order
-to modify the code and commit these changes. However, to simply checkout and run the
-code, no git knowledge is required other than what is documented in the following steps.
-
-To obtain the CTSM code you need to do the following:
-
-#. Clone the repository. ::
-
- git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox
-
- This will create a directory ``my_ctsm_sandbox/`` in your current working directory.
-
-#. Run the script **manage_externals/checkout_externals**. ::
-
- ./manage_externals/checkout_externals
-
- The **checkout_externals** script is a package manager that will
- populate the ctsm directory with the relevant versions of each of the
- components along with the CIME infrastructure code.
-
-At this point you have a working version of CTSM.
-
-To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ .
-
-More details on checkout_externals
-----------------------------------
-
-The file **Externals.cfg** in your top-level CTSM directory tells
-**checkout_externals** which tag/branch of each component should be
-brought in to generate your sandbox. **Externals_CLM.cfg** is used similarly to point to the correct version of FATES (and possibly other CTSM-specific externals in the future); the below instructions referring to **Externals.cfg** also apply to modifying **Externals_CLM.cfg**.
-
-NOTE: checkout_externals will always attempt
-to make the working copy exactly match the externals description. If
-you manually modify an external without updating Externals.cfg, e.g. switch
-to a different tag, then rerunning checkout_externals will switch you
-back to the external described in Externals.cfg. See below
-documentation `Customizing your CTSM sandbox`_ for more details.
-
-**You need to rerun checkout_externals whenever Externals.cfg has
-changed** (unless you have already manually updated the relevant
-external(s) to have the correct branch/tag checked out). Common times
-when this is needed are:
-
-* After checking out a new CTSM branch/tag
-
-* After merging some other CTSM branch/tag into your currently
- checked-out branch
-
-**checkout_externals** must be run from the root of the source
-tree. For example, if you cloned CTSM with::
-
- git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox
-
-then you must run **checkout_externals** from
-``/path/to/my_ctsm_sandbox``.
-
-To see more details of **checkout_externals**, issue ::
-
- ./manage_externals/checkout_externals --help
-
-Customizing your CTSM sandbox
-=============================
-
-There are several use cases to consider when you want to customize or modify your CTSM sandbox.
-
-Switching to a different CTSM branch or tag
--------------------------------------------
-
-If you have already checked out a branch or tag and **HAVE NOT MADE ANY
-MODIFICATIONS** it is simple to change your sandbox. Say that you
-checked out ctsm1.0.0 but really wanted to have ctsm1.1.0;
-you would simply do the following::
-
- git checkout ctsm1.1.0
- ./manage_externals/checkout_externals
-
-You should **not** use this method if you have made any source code
-changes, or if you have any ongoing CTSM cases that were created from
-this sandbox. In these cases, it is often easiest to do a second **git
-clone**.
-
-Pointing to a different version of a component
-----------------------------------------------
-
-Each entry in **Externals.cfg** has the following form (we use CIME as an
-example below)::
-
- [cime]
- local_path = cime
- protocol = git
- repo_url = https://github.com/CESM-Development/cime
- tag = cime5.4.0-alpha.20
- required = True
-
-Each entry specifies either a tag, a hash or a branch. To point to a new tag:
-
-#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing
- ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above)
-
-#. Checkout the new component(s)::
-
- ./manage_externals/checkout_externals
-
-To point to a hash, the process is the same, except also change ``tag = ...`` to ``hash = ...``.
-
-To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``manage_externals/checkout_externals`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.)
-
-Keep in mind that changing individual components from a tag may result
-in an invalid model (won't compile, won't run, not scientifically
-meaningful) and is unsupported.
-
-Committing your change to Externals.cfg
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-After making this change, it's a good idea to commit the change in your
-local CTSM git repository. First create a branch in your local
-repository, then commit it. (Unlike with subversion, branches are stored
-locally unless you explicitly push them up to GitHub. Feel free to
-create whatever local branches you'd like.) For example::
-
- git checkout -b my_ctsm_branch
- git add Externals.cfg
- git commit -m "Update CIME to cime5.4.0-alpha.20"
-
diff --git a/README_GITFLEXIMOD.rst b/README_GITFLEXIMOD.rst
new file mode 100644
index 0000000000..de6bbf392f
--- /dev/null
+++ b/README_GITFLEXIMOD.rst
@@ -0,0 +1,118 @@
+Obtaining the full model code and associated scripting infrastructure
+=====================================================================
+
+CTSM is released via GitHub. You will need some familiarity with git in order
+to modify the code and commit these changes. However, to simply checkout and run the
+code, no git knowledge is required other than what is documented in the following steps.
+
+To obtain the CTSM code you need to do the following:
+
+#. Clone the repository. ::
+
+ git clone https://github.com/ESCOMP/CTSM.git my_ctsm_sandbox
+
+ This will create a directory ``my_ctsm_sandbox/`` in your current working directory.
+
+#. Run **./bin/git-fleximod update**. ::
+
+ cd my_ctsm_sandbox
+ ./bin/git-fleximod update
+ ./bin/git-fleximod --help # for a user's guide
+
+ **git-fleximod** is a package manager that will
+ populate the ctsm directory with the relevant versions of each of the
+ components along with the CIME infrastructure code.
+ Additional documentation for git-fleximod appears here:
+ https://github.com/ESMCI/git-fleximod?tab=readme-ov-file#git-fleximod
+
+"components" here refers to seperate git repositories for seperable parts of
+the code (such as the MOSART or mizuRoute river models). Because they are
+managed with "submodule" in git hereafter we will refer to them as "submodule(s)".
+
+At this point you have a working version of CTSM.
+
+To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ .
+
+More details on git-fleximod
+----------------------------
+
+The file **.gitmodules** in your top-level CTSM directory tells
+**git-fleximod** which tag/branch of each submodule
+should be brought in to generate your sandbox.
+
+NOTE: If you manually modify a submodule without updating .gitmodules,
+e.g. switch to a different tag, then rerunning git-fleximod will warn you of
+local changes you need to resolve.
+git-fleximod will not change a modified submodule back to what is specified in
+.gitmodules without the --force option.
+See below documentation `Customizing your CTSM sandbox`_ for more details.
+
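+For example, to check whether your checked-out submodules match what
+.gitmodules expects, run::
+
+ ./bin/git-fleximod status
+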
+**You need to rerun git-fleximod whenever .gitmodules has
+changed** (unless you have already manually updated the relevant
+submodule(s) to have the correct branch/tag checked out). Common times
+when this is needed are:
+
+* After checking out a new CTSM branch/tag
+
+* After merging some other CTSM branch/tag into your currently
+ checked-out branch
+
+Customizing your CTSM sandbox
+=============================
+
+There are several use cases to consider when you want to customize or modify your CTSM sandbox.
+
+Switching to a different CTSM branch or tag
+-------------------------------------------
+
+If you have already checked out a branch or tag and **HAVE NOT MADE ANY
+MODIFICATIONS** it is simple to change your sandbox. Say that you
+checked out ctsm1.0.0 but really wanted to have ctsm1.1.0;
+you would simply do the following::
+
+ git checkout ctsm1.1.0
+ ./bin/git-fleximod update
+
+You should **not** use this method if you have made any source code
+changes, or if you have any ongoing CTSM cases that were created from
+this sandbox. In these cases, it is often easiest to do a second **git
+clone**.
+
+Pointing to a different version of a submodule
+----------------------------------------------
+
+Each entry in **.gitmodules** has the following form (we use CIME as an
+example below)::
+
+ [submodule "cime"]
+ path = cime
+ url = https://github.com/ESMCI/cime
+ fxtag = cime6.0.246
+ fxrequired = ToplevelRequired
+ fxDONOTUSEurl = https://github.com/ESMCI/cime
+
+Each entry specifies either a tag or a hash. To point to a new tag or hash:
+
+#. Modify the relevant entry/entries in **.gitmodules** (e.g., changing
+ ``cime6.0.246`` to ``cime6.0.247`` above)
+
+#. Checkout the new submodule(s)::
+
+ ./bin/git-fleximod update
+
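+You can then confirm that the submodule is at the new tag (using the cime
+example above)::
+
+ ./bin/git-fleximod status cime
+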
+Keep in mind that changing individual submodules from a tag may result
+in an invalid model (won't compile, won't run, not scientifically
+meaningful) and is unsupported.
+
+Committing your change to .gitmodules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After making this change, it's a good idea to commit the change in your
+local CTSM git repository. First create a branch in your local
+repository, then commit it. Feel free to create whatever local branches
+you'd like in git. For example::
+
+ git checkout -b my_ctsm_branch
+ git add .gitmodules
+ git commit -m "Update CIME to cime6.0.247"
+
diff --git a/bin/git-fleximod b/bin/git-fleximod
new file mode 100755
index 0000000000..f69ede1c22
--- /dev/null
+++ b/bin/git-fleximod
@@ -0,0 +1,8 @@
+#!/usr/bin/env python3
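+# Thin launcher: put the bundled .lib/git-fleximod package on sys.path,
+# then dispatch to its command-line entry point.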
+import sys
+import os
+sys.path.insert(0,os.path.abspath(os.path.join(os.path.dirname(__file__),"..",".lib","git-fleximod")))
+from git_fleximod.git_fleximod import main
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index eb34ac916f..142dd4aae4 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -71,7 +71,7 @@ REQUIRED OPTIONS
(if read they allow user_nl_clm and CLM_BLDNML_OPTS to expand
variables [for example to use \$DIN_LOC_ROOT])
(default current directory)
- -lnd_frac "domainfile" Land fraction file (the input domain file) (needed for MCT driver and LILAC)
+ -lnd_frac "domainfile" Land fraction file (the input domain file) (needed for LILAC)
-res "resolution" Specify horizontal grid. Use nlatxnlon for spectral grids;
dlatxdlon for fv grids (dlat and dlon are the grid cell size
in degrees for latitude and longitude respectively)
@@ -83,7 +83,7 @@ REQUIRED OPTIONS
(default 2000)
-structure "structure" The overall structure being used [ standard | fast ]
OPTIONS
- -driver "value" CESM driver type you will run with [ mct | nuopc ]
+ -driver "value" CESM driver type you will run with [ nuopc ]
-bgc "value" Build CLM with BGC package [ sp | bgc | fates ]
(default is sp).
CLM Biogeochemistry mode
@@ -101,18 +101,22 @@ OPTIONS
(Only for CLM4.5/CLM5.0)
-[no-]chk_res Also check [do NOT check] to make sure the resolution and
land-mask is valid.
- -clm_accelerated_spinup "on|off" Setup in a configuration to run as fast as possible for doing a throw-away
+ -clm_accelerated_spinup "on|sasu|off" Setup in a configuration to run as fast as possible for doing a throw-away
simulation in order to get the model to a spun-up state. So do things like
turn off expensive options and setup for a low level of history output.
If CLM4.5/CLM5.0 and bgc it also includes a prognostic Carbon model (cn or bgc)
, also by default turn on Accelerated Decomposition mode which
- is controlled by the namelist variable spinup_state.
+ is controlled by the namelist variable spinup_state (when soil matrix CN is off).
- Turn on given spinup mode for BGC setting of CN
+ Turn on given spinup mode for BGC setting of CN (soil matrix CN off)
on : Turn on Accelerated Decomposition (spinup_state = 1 or 2)
off : run in normal mode (spinup_state = 0)
+ To spin up using the CN soil matrix method, use "sasu": Semi-Analytic Spin-Up (SASU)
+ sasu: Turn on matrix spinup (spinup_matrixcn=T)
+ Normal spinup sequence is: on, sasu, off
+
Default is set by clm_accelerated_spinup mode.
Spinup is now a two step procedure. First, run the model
@@ -366,7 +370,7 @@ sub check_for_perl_utils {
} else {
die <<"EOF";
** Cannot find the root of the cime directory enter it using the -cimeroot option
- Did you run the checkout_externals scripts?
+ Did you run ./bin/git-fleximod update?
EOF
}
}
@@ -783,9 +787,10 @@ sub setup_cmdl_fates_mode {
# dis-allow fates specific namelist items with non-fates runs
my @list = ( "fates_spitfire_mode", "use_fates_planthydro", "use_fates_ed_st3", "use_fates_ed_prescribed_phys",
"use_fates_cohort_age_tracking","use_fates_inventory_init","use_fates_fixed_biogeog",
- "use_fates_nocomp","use_fates_sp","fates_inventory_ctrl_filename","use_fates_logging",
- "fates_parteh_mode","use_fates_tree_damage","fates_history_dimlevel","fates_seeddisp_cadence",
- "use_fates_luh","fluh_timeseries" );
+ "use_fates_nocomp","use_fates_sp","fates_inventory_ctrl_filename","fates_harvest_mode",
+ "fates_parteh_mode","use_fates_tree_damage","fates_seeddisp_cadence","use_fates_luh","fluh_timeseries",
+ "flandusepftdat","use_fates_potentialveg","use_fates_lupft","fates_history_dimlevel" );
+
# dis-allow fates specific namelist items with non-fates runs
foreach my $var ( @list ) {
if ( defined($nl->get_value($var)) ) {
@@ -894,6 +899,7 @@ sub setup_cmdl_bgc {
'phys'=>$nl_flags->{'phys'}, 'use_cn'=>$nl_flags->{'use_cn'}, 'use_fates'=>$nl_flags->{'use_fates'},
'use_fates_sp'=>$nl_flags->{'use_fates_sp'} );
my $soil_decomp_method = remove_leading_and_trailing_quotes( $nl->get_value( $var ) );
+ $nl_flags->{$var} = $soil_decomp_method;
if ( &value_is_true($nl_flags->{'use_cn'}) ) {
if ( $soil_decomp_method eq "None" ) {
@@ -952,6 +958,30 @@ sub setup_cmdl_bgc {
if ( (! &value_is_true($nl_flags->{'use_nitrif_denitrif'}) ) && &value_is_true($nl->get_value('use_fun')) ) {
$log->fatal_error("When FUN is on, use_nitrif_denitrif MUST also be on!");
}
+ #
+ # Make sure clm_accelerated_spinup is set correctly
+ #
+ $var = "clm_accelerated_spinup";
+ if ( $opts->{$var} ne "default" ) {
+ $val = $opts->{$var};
+ } else {
+ $val = $defaults->get_value($var);
+ }
+ $nl_flags->{$var} = $val;
+ # Set soil matrix (which is needed later for spinup)
+ $var = "use_soil_matrixcn";
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
+ , 'use_fates'=>$nl_flags->{'use_fates'},
+ , 'soil_decomp_method'=>$nl_flags->{'soil_decomp_method'},
+ , 'phys'=>$nl_flags->{'phys'}, clm_accelerated_spinup=>$nl_flags->{'clm_accelerated_spinup'} );
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $nl_flags->{$var} = ".true.";
+ } else {
+ $nl_flags->{$var} = ".false.";
+ }
+ if ( &value_is_true($nl->get_value($var)) && $nl_flags->{'soil_decomp_method'} ne "CENTURYKoven2013" ) {
+ $log->fatal_error("$var can only be on with CENTURYKoven2013 soil decomposition");
+ }
} # end bgc
@@ -1156,32 +1186,40 @@ sub setup_cmdl_spinup {
my $val;
my $var;
$nl_flags->{'spinup'} = undef;
+ # clm_accelerated_spinup will already have been set in setup_cmdl_bgc
$var = "clm_accelerated_spinup";
- if ( $opts->{$var} ne "default" ) {
- $val = $opts->{$var};
- } else {
- $val = $defaults->get_value($var);
- }
- $nl_flags->{$var} = $val;
+ $val = $nl_flags->{'clm_accelerated_spinup'};
my $group = $definition->get_group_name($var);
$nl->set_variable_value($group, $var, quote_string($val) );
if ( ! $definition->is_valid_value( $var, $val , 'noquotes' => 1) ) {
my @valid_values = $definition->get_valid_values( $var );
$log->fatal_error("$var has an invalid value ($val). Valid values are: @valid_values");
}
+ if ( $nl_flags->{'clm_accelerated_spinup'} eq "sasu" ) {
+ if ( ! &value_is_true($nl_flags->{'use_cn'}) ) {
+ $log->fatal_error("If clm_accelerated_spinup is sasu, use_cn MUST be on" );
+ }
+ if ( ! &value_is_true($nl_flags->{'use_soil_matrixcn'}) ) {
+ $log->fatal_error("If clm_accelerated_spinup is sasu, use_soil_matrixcn MUST be on" );
+ }
+ }
$log->verbose_message("CLM accelerated spinup mode is $val");
if ( &value_is_true($nl_flags->{'use_cn'}) ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition,
- $defaults, $nl, "spinup_state", clm_accelerated_spinup=>$nl_flags->{$var},
- use_cn=>$nl_flags->{'use_cn'}, use_fates=>$nl_flags->{'use_fates'} );
+ $defaults, $nl, "spinup_state", clm_accelerated_spinup=>$nl_flags->{'clm_accelerated_spinup'},
+ use_cn=>$nl_flags->{'use_cn'}, use_fates=>$nl_flags->{'use_fates'},
+ use_soil_matrixcn=>$nl_flags->{"use_soil_matrixcn"} );
if ( $nl->get_value("spinup_state") ne 0 ) {
$nl_flags->{'bgc_spinup'} = "on";
+ if ( &value_is_true($nl_flags->{'use_soil_matrixcn'}) ) {
+ $log->fatal_error("spinup_state is accelerated (=1 or 2), but use_soil_matrixcn is also true" .
+ ", change one or the other");
+ }
if ( $nl_flags->{'clm_accelerated_spinup'} eq "off" ) {
$log->fatal_error("spinup_state is accelerated, but clm_accelerated_spinup is off, change one or the other");
}
} else {
$nl_flags->{'bgc_spinup'} = "off";
- $val = $defaults->get_value($var);
}
# For AD spinup mode by default reseed dead plants
if ( $nl_flags->{$var} ne "off" ) {
@@ -1196,6 +1234,27 @@ sub setup_cmdl_spinup {
}
}
$nl_flags->{$var} = $val;
+ if ( &value_is_true($nl_flags->{'use_soil_matrixcn'}) ) {
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, "spinup_matrixcn",
+ , 'use_fates'=>$nl_flags->{'use_fates'}, 'bgc_mode'=>$nl_flags->{'bgc_mode'}
+ , 'phys'=>$nl_flags->{'phys'}, 'use_soil_matrixcn'=>$nl_flags->{'use_soil_matrixcn'},
+ , clm_accelerated_spinup=>$nl_flags->{'clm_accelerated_spinup'} );
+ my $spinup;
+ if ( &value_is_true($nl->get_value("spinup_matrixcn") ) ) {
+ $spinup = ".true.";
+ } else {
+ $spinup = ".false.";
+ }
+ $nl_flags->{'spinup_matrixcn'} = $spinup;
+ if ( &value_is_true($nl_flags->{'spinup_matrixcn'}) ) {
+ $nl_flags->{'bgc_spinup'} = "on";
+ if ( $nl_flags->{'clm_accelerated_spinup'} eq "off" ) {
+ $log->fatal_error("matrix spinup (spinup_matrixcn) is True, but clm_accelerated_spinup is off, change one or the other");
+ }
+ } else {
+ $nl_flags->{'bgc_spinup'} = "off";
+ }
+ }
my $group = $definition->get_group_name($var);
$nl->set_variable_value($group, $var, quote_string($val) );
if ( ! $definition->is_valid_value( $var, $val , 'noquotes' => 1) ) {
@@ -1205,11 +1264,13 @@ sub setup_cmdl_spinup {
if ( $nl_flags->{'bgc_spinup'} eq "on" && (not &value_is_true( $nl_flags->{'use_cn'} )) && (not &value_is_true($nl_flags->{'use_fates'})) ) {
$log->fatal_error("$var can not be '$nl_flags->{'bgc_spinup'}' if neither CN nor FATES is turned on (use_cn=$nl_flags->{'use_cn'}, use_fates=$nl_flags->{'use_fates'}).");
}
- if ( $nl->get_value("spinup_state") eq 0 && $nl_flags->{'bgc_spinup'} eq "on" ) {
- $log->fatal_error("Namelist spinup_state contradicts the command line option bgc_spinup" );
- }
- if ( $nl->get_value("spinup_state") eq 1 && $nl_flags->{'bgc_spinup'} eq "off" ) {
- $log->fatal_error("Namelist spinup_state contradicts the command line option bgc_spinup" );
+ if ( ! &value_is_true($nl_flags->{'use_soil_matrixcn'}) ) {
+ if ( $nl->get_value("spinup_state") eq 0 && $nl_flags->{'bgc_spinup'} eq "on" ) {
+ $log->fatal_error("Namelist spinup_state contradicts the command line option bgc_spinup" );
+ }
+ if ( $nl->get_value("spinup_state") eq 1 && $nl_flags->{'bgc_spinup'} eq "off" ) {
+ $log->fatal_error("Namelist spinup_state contradicts the command line option bgc_spinup" );
+ }
}
$val = $nl_flags->{'bgc_spinup'};
@@ -1582,7 +1643,6 @@ sub process_namelist_inline_logic {
setup_logic_hillslope($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_o3_veg_stress_method($opts, $nl_flags, $definition, $defaults, $nl,$physv);
setup_logic_hydrstress($opts, $nl_flags, $definition, $defaults, $nl);
- setup_logic_dynamic_roots($opts, $nl_flags, $definition, $defaults, $nl, $physv);
setup_logic_params_file($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_create_crop_landunit($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_subgrid($opts, $nl_flags, $definition, $defaults, $nl);
@@ -1595,6 +1655,7 @@ sub process_namelist_inline_logic {
if ( remove_leading_and_trailing_quotes($nl_flags->{'clm_start_type'}) ne "branch" ) {
setup_logic_initial_conditions($opts, $nl_flags, $definition, $defaults, $nl, $physv);
}
+ setup_logic_cnmatrix($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref);
setup_logic_spinup($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_supplemental_nitrogen($opts, $nl_flags, $definition, $defaults, $nl);
setup_logic_snowpack($opts, $nl_flags, $definition, $defaults, $nl);
@@ -1893,10 +1954,10 @@ sub setup_logic_lnd_frac {
my ($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref) = @_;
#
- # fatmlndfrc is required for the MCT driver (or LILAC), but uneeded for NUOPC
+ # fatmlndfrc is required for LILAC but unneeded for NUOPC
#
my $var = "lnd_frac";
- if ( ($opts->{'driver'} eq "mct") || $opts->{'lilac'} ) {
+ if ( $opts->{'lilac'} ) {
if ( defined($opts->{$var}) ) {
if ( defined($nl->get_value('fatmlndfrc')) ) {
$log->fatal_error("Can NOT set both -lnd_frac option (set via LND_DOMAIN_PATH/LND_DOMAIN_FILE " .
@@ -2249,6 +2310,7 @@ sub setup_logic_urban {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'building_temp_method');
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'urban_hac');
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'urban_explicit_ac');
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'urban_traffic');
}
@@ -3014,8 +3076,8 @@ sub setup_logic_do_harvest {
$cannot_be_true = "$var can only be set to true when running a transient case (flanduse_timeseries non-blank)";
}
- elsif (!&value_is_true($nl->get_value('use_cn')) && !&value_is_true($nl->get_value('use_fates'))) {
- $cannot_be_true = "$var can only be set to true when running with either CN or FATES";
+ elsif (!&value_is_true($nl->get_value('use_cn'))) {
+ $cannot_be_true = "$var can only be set to true when running with CN. Please set use_cn to true.";
}
if ($cannot_be_true) {
@@ -3102,7 +3164,7 @@ sub setup_logic_spinup {
if ( $nl_flags->{'bgc_mode'} eq "sp" && defined($nl->get_value('override_bgc_restart_mismatch_dump'))) {
$log->fatal_error("CN must be on if override_bgc_restart_mismatch_dump is set.");
}
- if ( $nl_flags->{'clm_accelerated_spinup'} eq "on" ) {
+ if ( $nl_flags->{'clm_accelerated_spinup'} =~ /on|sasu/ ) {
foreach my $var ( "hist_nhtfrq", "hist_fincl1", "hist_empty_htapes", "hist_mfilt" ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl,
$var, use_cn=>$nl_flags->{'use_cn'}, use_fates=>$nl_flags->{'use_fates'},
@@ -3525,6 +3587,7 @@ sub setup_logic_hillslope {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'hillslope_pft_distribution_method' );
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'hillslope_soil_profile_method' );
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_hillslope_routing', 'use_hillslope'=>$nl_flags->{'use_hillslope'} );
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'hillslope_fsat_equals_zero', 'use_hillslope'=>$nl_flags->{'use_hillslope'} );
my $use_hillslope = $nl->get_value('use_hillslope');
my $use_hillslope_routing = $nl->get_value('use_hillslope_routing');
if ( (! &value_is_true($use_hillslope)) && &value_is_true($use_hillslope_routing) ) {
@@ -3581,25 +3644,6 @@ sub setup_logic_grainproduct {
#-------------------------------------------------------------------------------
-sub setup_logic_dynamic_roots {
- #
- # dynamic root model
- #
- my ($opts, $nl_flags, $definition, $defaults, $nl, $physv) = @_;
-
- add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_dynroot', 'phys'=>$physv->as_string(), 'bgc_mode'=>$nl_flags->{'bgc_mode'});
- my $use_dynroot = $nl->get_value('use_dynroot');
- if ( &value_is_true($use_dynroot) && ($nl_flags->{'bgc_mode'} eq "sp") ) {
- $log->fatal_error("Cannot turn dynroot mode on mode bgc=sp\n" .
- "Set the bgc mode to 'bgc'.");
- }
- if ( &value_is_true( $use_dynroot ) && &value_is_true( $nl_flags->{'use_hydrstress'} ) ) {
- $log->fatal_error("Cannot turn use_dynroot on when use_hydrstress is on" );
- }
-}
-
-#-------------------------------------------------------------------------------
-
sub setup_logic_c_isotope {
#
# Error checking for C-isotope options
@@ -3838,9 +3882,7 @@ sub setup_logic_popd_streams {
}
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_fldfilename_popdens', 'phys'=>$nl_flags->{'phys'},
'cnfireson'=>$nl_flags->{'cnfireson'}, 'hgrid'=>"0.5x0.5", 'ssp_rcp'=>$nl_flags->{'ssp_rcp'} );
- #
- # TODO (mvertens, 2021-06-22) the following is needed for MCT since a use case enforces this - so for now stream_meshfile_popdens will be added to the mct
- # stream namelist but simply not used
+
if ($opts->{'driver'} eq "nuopc" ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_meshfile_popdens', 'hgrid'=>"0.5x0.5");
my $inputdata_rootdir = $nl_flags->{'inputdata_rootdir'};
@@ -3854,12 +3896,6 @@ sub setup_logic_popd_streams {
$val = "e_string( $val );
$nl->set_variable_value($group, $var, $val);
}
- } else {
- my $var = 'stream_meshfile_popdens';
- my $group = $definition->get_group_name($var);
- my $val = "none";
- $val = "e_string( $val );
- $nl->set_variable_value($group, $var, $val);
}
} else {
# If bgc is NOT CN/CNDV or fire_method==nofire then make sure none of the popdens settings are set
@@ -3896,7 +3932,7 @@ sub setup_logic_urbantv_streams {
'sim_year_range'=>$nl_flags->{'sim_year_range'});
}
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_fldfilename_urbantv', 'phys'=>$nl_flags->{'phys'},
- 'hgrid'=>"0.9x1.25" );
+ 'hgrid'=>"0.9x1.25", 'urban_explicit_ac'=>$nl->get_value('urban_explicit_ac') );
if ($opts->{'driver'} eq "nuopc" ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_meshfile_urbantv', 'phys'=>$nl_flags->{'phys'},
'hgrid'=>"0.9x1.25" );
@@ -4009,7 +4045,7 @@ sub setup_logic_dust_emis {
foreach my $option ( @zender_files_in_lnd_opts ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $option,
'dust_emis_method'=>$dust_emis_method, 'zender_soil_erod_source'=>$zender_source,
- 'hgrid'=>$nl_flags->{'res'}, 'lnd_tuning_mod'=>$nl_flags->{'lnd_tuning_mode'} );
+ 'hgrid'=>$nl_flags->{'res'}, 'lnd_tuning_mode'=>$nl_flags->{'lnd_tuning_mode'} );
}
} else {
foreach my $option ( @zender_files_in_lnd_opts ) {
@@ -4515,13 +4551,27 @@ sub setup_logic_fates {
if (&value_is_true( $nl_flags->{'use_fates'}) ) {
add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fates_paramfile', 'phys'=>$nl_flags->{'phys'});
my @list = ( "fates_spitfire_mode", "use_fates_planthydro", "use_fates_ed_st3", "use_fates_ed_prescribed_phys",
- "use_fates_inventory_init","use_fates_fixed_biogeog","use_fates_nocomp","fates_seeddisp_cadence",
- "use_fates_logging","fates_parteh_mode", "use_fates_cohort_age_tracking","use_fates_tree_damage",
- "use_fates_luh","fates_history_dimlevel" );
+ "use_fates_inventory_init","fates_seeddisp_cadence","fates_history_dimlevel",
+ "fates_harvest_mode","fates_parteh_mode", "use_fates_cohort_age_tracking","use_fates_tree_damage" );
+
foreach my $var ( @list ) {
- add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'use_fates'=>$nl_flags->{'use_fates'},
- 'use_fates_sp'=>$nl_flags->{'use_fates_sp'} );
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'use_fates'=>$nl_flags->{'use_fates'},
+ 'use_fates_sp'=>$nl_flags->{'use_fates_sp'} );
}
+
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_potentialveg', 'use_fates'=>$nl_flags->{'use_fates'});
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_lupft', 'use_fates'=>$nl_flags->{'use_fates'});
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_luh', 'use_fates'=>$nl_flags->{'use_fates'},
+ 'use_fates_lupft'=>$nl->get_value('use_fates_lupft'),
+ 'use_fates_potentialveg'=>$nl->get_value('use_fates_potentialveg'),
+ 'fates_harvest_mode'=>remove_leading_and_trailing_quotes($nl->get_value('fates_harvest_mode')) );
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_nocomp', 'use_fates'=>$nl_flags->{'use_fates'},
+ 'use_fates_lupft'=>$nl->get_value('use_fates_lupft'),
+ 'use_fates_sp'=>$nl_flags->{'use_fates_sp'} );
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_fixed_biogeog', 'use_fates'=>$nl_flags->{'use_fates'},
+ 'use_fates_lupft'=>$nl->get_value('use_fates_lupft'),
+ 'use_fates_sp'=>$nl_flags->{'use_fates_sp'} );
+
my $suplnitro = $nl->get_value('suplnitro');
my $parteh_mode = $nl->get_value('fates_parteh_mode');
if ( ($parteh_mode == 1) && ($suplnitro !~ /ALL/) && not &value_is_true( $nl_flags->{'use_fates_sp'}) ) {
@@ -4529,7 +4579,7 @@ sub setup_logic_fates {
"but and FATES-SP is not active, but fates_parteh_mode is 1, so Nitrogen is not active" .
"Change suplnitro back to ALL");
}
- #
+
# For FATES SP mode make sure no-competetiion, and fixed-biogeography are also set
# And also check for other settings that can't be trigged on as well
#
@@ -4546,6 +4596,16 @@ sub setup_logic_fates {
if ( $nl->get_value('fates_spitfire_mode') > 0 ) {
$log->fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true');
}
+
+ # fates landuse can't be on when FATES SP mode is active
+ if ( &value_is_true($nl->get_value('use_fates_luh')) ) {
+ $log->fatal_error('use_fates_luh can NOT be true when use_fates_sp is true');
+ }
+
+ # hydro isn't currently supported to work when FATES SP mode is active
+ if (&value_is_true( $nl->get_value('use_fates_planthydro') )) {
+ $log->fatal_error('fates sp mode is currently not supported to work with fates hydro');
+ }
}
}
my $var = "use_fates_inventory_init";
@@ -4560,22 +4620,210 @@ sub setup_logic_fates {
}
}
}
+ # make sure that fates landuse x pft mode has the necessary run mode configurations
+ my $var = "use_fates_lupft";
+ if ( defined($nl->get_value($var)) ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ my @list = ( "use_fates_luh", "use_fates_nocomp", "use_fates_fixed_biogeog" );
+ foreach my $var ( @list ) {
+ if ( ! &value_is_true($nl->get_value($var)) ) {
+ $log->fatal_error("$var is required when use_fates_lupft is true" );
+ }
+ }
+ }
+ }
+ # check that fates landuse change mode has the necessary luh2 landuse timeseries data
+ # and add the default if not defined. Do not add default if use_fates_potentialveg is true.
+ # If fixed biogeography is on, make sure that flandusepftdat is available.
my $var = "use_fates_luh";
if ( defined($nl->get_value($var)) ) {
if ( &value_is_true($nl->get_value($var)) ) {
- $var = "fluh_timeseries";
- add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'phys'=>$nl_flags->{'phys'}, 'hgrid'=>$nl_flags->{'res'}, 'sim_year_range'=>$nl_flags->{'sim_year_range'}, nofail=>1 );
+ $var = "use_fates_potentialveg";
+ if ( defined($nl->get_value($var)) ) {
+ if ( ! &value_is_true($nl->get_value($var)) ) {
+ $var = "fluh_timeseries";
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'use_fates'=>$nl_flags->{'use_fates'},
+ 'hgrid'=>$nl_flags->{'res'}, 'sim_year_range'=>$nl_flags->{'sim_year_range'});
+ my $fname = remove_leading_and_trailing_quotes( $nl->get_value($var) );
+ if ( ! defined($nl->get_value($var)) ) {
+ $log->fatal_error("$var is required when use_fates_luh is set and use_fates_potentialveg is false" );
+ } elsif ( ! -f "$fname" ) {
+ $log->fatal_error("$var does NOT point to a valid filename" );
+ }
+ }
+ }
+ $var = "use_fates_fixed_biogeog";
+ if ( defined($nl->get_value($var)) ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $var = "flandusepftdat";
+ add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'use_fates'=>$nl_flags->{'use_fates'},
+ 'phys'=>$nl_flags->{'phys'}, 'hgrid'=>$nl_flags->{'res'}, nofail=>1 );
+ my $fname = remove_leading_and_trailing_quotes( $nl->get_value($var) );
+ if ( ! defined($nl->get_value($var)) ) {
+               $log->fatal_error("$var is required when use_fates_luh and use_fates_fixed_biogeog are set" );
+ } elsif ( ! -f "$fname" ) {
+ $log->fatal_error("$var does NOT point to a valid filename" );
+ }
+ }
+ }
+ }
+ }
+ # check that fates landuse is on and harvest mode is off when potential veg switch is true
+ my $var = "use_fates_potentialveg";
+ if ( defined($nl->get_value($var)) ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ if ( ! &value_is_true($nl->get_value('use_fates_luh')) ) {
+ $log->fatal_error("use_fates_luh must be true when $var is true" );
+ }
+         my $mode = remove_leading_and_trailing_quotes($nl->get_value('fates_harvest_mode'));
+         if ( $mode ne 'no_harvest') {
+            $log->fatal_error("fates_harvest_mode is set to $mode. It must be set to no_harvest when use_fates_potentialveg is true." );
+ }
+ my $var = "fluh_timeseries";
+ if ( defined($nl->get_value($var)) ) {
+ $log->fatal_error("fluh_timeseries can not be defined when use_fates_potentialveg is true" );
+ }
+ }
+ }
+ # Check fates_harvest_mode compatibility
+ my $var = "fates_harvest_mode";
+ if ( defined($nl->get_value($var)) ) {
+ # using fates_harvest mode with raw luh2 harvest data
+ my $mode = remove_leading_and_trailing_quotes($nl->get_value($var));
+ if ( $mode eq 'luhdata_area' || $mode eq 'luhdata_mass' ) {
+ # Make sure that use_fates_luh is true when using raw fates luh2 harvest data
+ if ( ! &value_is_true($nl->get_value('use_fates_luh')) ) {
+ $log->fatal_error("use_fates_luh is required to be true when $var is luhdata_mass or luhdata_area" );
+ }
+ } elsif ( $mode eq 'landuse_timeseries' ) {
+ # Check to make sure that the user set the flanduse_timeseries file
+ # Since the flanduse_timeseries logic checking is upstream of the fates logic,
+ # don't add the default here. The onus is on the user to match the correct timeseries
+ # data to the correct surface dataset resolution
+ my $var = "flanduse_timeseries";
my $fname = remove_leading_and_trailing_quotes( $nl->get_value($var) );
if ( ! defined($nl->get_value($var)) ) {
- $log->fatal_error("$var is required when use_fates_luh is set" );
+ $log->fatal_error("$var is required when fates_harvest_mode is landuse_timeseries" );
} elsif ( ! -f "$fname" ) {
- $log->fatal_error("$fname does NOT point to a valid filename" );
+ $log->fatal_error("$var does NOT point to a valid filename" );
}
}
}
}
}
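Since fates_harvest_mode now drives several branches, its compatibility rules can be summarized as a small lookup table. A sketch under the assumption that the allowed strings are exactly the five values documented in namelist_definition_ctsm.xml (the hashes are illustrative, not code from the module):

    use strict;
    use warnings;

    my %known     = map { $_ => 1 } qw(no_harvest event_code landuse_timeseries luhdata_area luhdata_mass);
    my %needs_luh = ( luhdata_area => 1, luhdata_mass => 1 );    # raw-LUH2 modes require use_fates_luh

    my ($mode, $use_fates_luh) = ('luhdata_area', 1);            # made-up example inputs
    die "unknown fates_harvest_mode '$mode'\n"             unless $known{$mode};
    die "use_fates_luh is required when mode is '$mode'\n" if $needs_luh{$mode} && !$use_fates_luh;
    print "fates_harvest_mode '$mode' is consistent\n";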
+
+#-------------------------------------------------------------------------------
+
+sub setup_logic_cnmatrix {
+ #
+ # Set some default options related to the CN Matrix options
+ #
+ my ($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref) = @_;
+
+ my @matrixlist = ( "use_matrixcn", "hist_wrt_matrixcn_diag" );
+ foreach my $var ( @matrixlist ) {
+     add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
+                 'use_fates'=>$nl_flags->{'use_fates'}, 'bgc_mode'=>$nl_flags->{'bgc_mode'},
+                 'phys'=>$nl_flags->{'phys'}, 'use_soil_matrixcn'=>$nl_flags->{'use_soil_matrixcn'},
+                 'spinup_matrixcn'=>$nl_flags->{'spinup_matrixcn'}, 'clm_accelerated_spinup'=>$nl_flags->{'clm_accelerated_spinup'} );
+ }
+ @matrixlist = ( "use_matrixcn", "use_soil_matrixcn", "hist_wrt_matrixcn_diag", "spinup_matrixcn" );
+ # Matrix items can't be on for OMP_NUM_THREADS (also known as NTHRDS_LND) > 1
+ my $var_xml = "OMP_NUM_THREADS";
+ my $val_xml = $ENV{$var_xml};
+  if ( defined($val_xml) && $val_xml > 1 ) {
+ foreach my $var ( @matrixlist ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $log->warning("$var and $var_xml > 1 (in this case $val_xml) causes a clm threading test to FAIL (as of 2024/7/10), so use at your own risk." );
+ }
+ }
+ }
+
+  # Matrix items may fail with balance errors in transient mode
+ if (not string_is_undef_or_empty($nl->get_value('flanduse_timeseries'))) {
+ foreach my $var ( @matrixlist ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+        $log->warning("$var may FAIL with a balance error in transient mode" );
+ }
+ }
+ }
+ # Matrix items can't be on for SP mode
+ if ( $nl_flags->{'bgc_mode'} eq "sp" ) {
+ foreach my $var ( @matrixlist ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $log->fatal_error("$var can NOT be on for SP mode" );
+ }
+ }
+ # Matrix items can't be on for FATES
+ } elsif ( $nl_flags->{'bgc_mode'} eq "fates" ) {
+ foreach my $var ( @matrixlist ) {
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $log->fatal_error("$var can NOT be on with FATES" );
+ }
+ }
+ # Otherwise for CN or BGC mode
+ } else {
+ # TODO (slevis 2023/12/1) The next two if statements do nothing. Erik K and Sam L found that
+ # for_testing_use_second_grain_pool and for_testing_use_repr_structure_pool
+ # are empty rather than .true. or .false., but we did not get to the bottom
+ # of why, yet. The same error-check in the code does get triggered at run-time,
+ # so we will not pursue fixing this right now.
+ # If matrixcn is on, for_testing_use_second_grain_pool and for_testing_use_repr_structure_pool must be off
+ if ( &value_is_true($nl->get_value("use_matrixcn")) && &value_is_true($nl_flags->{"for_testing_use_second_grain_pool"}) ) {
+ $log->fatal_error("for_testing_use_second_grain_pool can NOT be on when use_matrixcn is on" );
+ }
+ if ( &value_is_true($nl->get_value("use_matrixcn")) && &value_is_true($nl_flags->{"for_testing_use_repr_structure_pool"}) ) {
+ $log->fatal_error("for_testing_use_repr_structure_pool can NOT be on when use_matrixcn is on" );
+ }
+ # If both matrixcn and soil_matrix are off hist_wrt_matrixcn_diag can't be on
+ if ( ! &value_is_true($nl->get_value("use_matrixcn")) && ! &value_is_true($nl_flags->{"use_soil_matrixcn"}) ) {
+ my $var = "hist_wrt_matrixcn_diag";
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $log->fatal_error("$var can NOT be on when both use_matrixcn and use_soil_matrixcn are off" );
+ }
+ }
+ # If soil_matrix is off spinup_matrixcn can't be on
+ if ( ! &value_is_true($nl_flags->{"use_soil_matrixcn"}) ) {
+ my $var = "spinup_matrixcn";
+ if ( &value_is_true($nl->get_value($var)) ) {
+ $log->fatal_error("$var can NOT be on when use_soil_matrixcn is off" );
+ }
+ }
+ }
+ # if soil matrix is on and spinup is on, set spinup specific variables
+ my @spinup_vars = ( "nyr_forcing", "nyr_sasu", "iloop_avg" );
+ foreach my $var ( @spinup_vars ) {
+ if ( &value_is_true($nl_flags->{"use_soil_matrixcn"}) && &value_is_true($nl_flags->{'spinup_matrixcn'}) ) {
+ if ( $var ne "nyr_sasu" ) {
+           add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
+                       'phys'=>$nl_flags->{'phys'}, 'spinup_matrixcn'=>$nl_flags->{'spinup_matrixcn'} );
+ } else {
+ # Set SASU spinup period to nyr_forcing (slow mode) by default
+           add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var,
+                       'val'=>$nl->get_value("nyr_forcing") );
+ }
+ my $val = $nl->get_value($var);
+        if ( ($var eq "iloop_avg") && ($val == -999) ) { next; } # iloop_avg can be the special flag value -999
+ if ( $val < 1 ) {
+ $log->fatal_error("$var can NOT be negative or zero" );
+ }
+ } else {
+ my $val = $nl->get_value($var);
+ if ( defined($val) ) {
+          $log->fatal_error("$var can NOT be set when use_soil_matrixcn and spinup_matrixcn are off" );
+ }
+ }
+ }
+ if ( &value_is_true($nl_flags->{"use_soil_matrixcn"}) && &value_is_true($nl_flags->{'spinup_matrixcn'}) ) {
+ my $nyr_forcing = $nl->get_value('nyr_forcing');
+ my $nyr_sasu = $nl->get_value('nyr_sasu');
+ if ( $nyr_sasu > $nyr_forcing ) {
+ $log->fatal_error("nyr_sasu can NOT be greater than nyr_forcing" );
+ }
+ }
+}
+
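The spinup bookkeeping enforced at the end of setup_logic_cnmatrix reduces to two rules: nyr_sasu may not exceed nyr_forcing, and iloop_avg must be positive unless it carries the -999 flag value. A standalone sketch with made-up values:

    use strict;
    use warnings;

    my ($nyr_forcing, $nyr_sasu, $iloop_avg) = (20, 5, -999);   # example values only

    die "nyr_sasu can NOT be greater than nyr_forcing\n" if $nyr_sasu > $nyr_forcing;
    die "iloop_avg can NOT be negative or zero\n"        if $iloop_avg != -999 && $iloop_avg < 1;
    print "spinup settings are consistent\n";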
#-------------------------------------------------------------------------------
sub setup_logic_exice {
#
diff --git a/bld/config_files/config_definition_ctsm.xml b/bld/config_files/config_definition_ctsm.xml
index dfe6378f17..e6628b1d94 100644
--- a/bld/config_files/config_definition_ctsm.xml
+++ b/bld/config_files/config_definition_ctsm.xml
@@ -18,11 +18,11 @@ Root directory of CLM source distribution (directory above CLM configure).
Component framework interface to use
-(Model Coupling Toolkit, or Earth System Modeling Framework)
+(Earth System Modeling Framework)
off
-2
-1
-2
-1
-0
+2
+1
+2
+1
+0
+0
+0.true.
@@ -66,7 +68,8 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
.false.
-.true.
+.true.
+.true.'TOTECOSYSC','TOTECOSYSN','TOTSOMC','TOTSOMN','TOTVEGC','TOTVEGN','TLAI','GPP','NPP','TWS','TSAI','HTOP','HBOT''TOTSOMC','TOTSOMN','TLAI','GPP','NPP','TWS''TLAI','TWS'
--8760
-20
+'TOTECOSYSC','TOTECOSYSN','TOTSOMC','TOTSOMN','TOTVEGC','TOTVEGN','TLAI','GPP','NPP','TWS','TSAI','HTOP','HBOT'
+'TOTECOSYSC','TOTECOSYSN','TOTSOMC','TOTSOMN','TOTVEGC','TOTVEGN','TLAI','GPP','CPOOL','NPP','TWS'
+'TOTSOMC','TOTSOMN','TLAI','GPP','NPP','TWS'
+'TLAI','TWS'
+-8760
+-8760
+20
+20.false.
@@ -115,8 +128,8 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP534os_3x1_global_1850-2100_yearly_c181209.nclnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP5B_3x1_global_1850-2100_yearly_c181209.nc
-
-.false.
+
+.false..true..false.
@@ -124,9 +137,10 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
.false.
-1
-0
-0
+1
+0
+0
+0NONE
@@ -146,6 +160,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
FASTNONE
+
+.false.
+.false.
+.false.
+.false.
+
.false.
@@ -243,18 +263,24 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>30.0d00
30.0d00
+20.0d0020.0d0020.0d0020.0d00
+20.0d0020.0d0020.0d0020.0d00
+20.0d0020.0d000.010d00
0.010d00
+0.008d000.008d000.008d000.008d00
+0.008d000.008d000.008d000.008d00
+0.008d000.008d00Jordan1991
Sturm1997
+Sturm1997
-lnd/clm2/paramdata/fates_params_api.32.0.0_12pft_c231215.nc
+lnd/clm2/paramdata/fates_params_api.36.0.0_12pft_c240517.nc
@@ -567,9 +600,6 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
.true.
-
-.false.
-
.false..true.
@@ -595,12 +625,36 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
StandardUniform.true.
+.false.
+.true..false..true..false.
+
+
+.false.
+.true.
+
+.false.
+.true.
+
+.false.
+.false.
+.false.
+.true.
+.true.
+.false.
+1
+20
+
+1
+-999
+-999
+
+
.true.
@@ -633,7 +687,66 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
For an exact match for these grids
======================================== -->
+
+.true.
+.true.
+.true.
+.true.
+
+.true.
+
+
+.true.
+
+
+.true.
+.true.
+.true.
+.true.
+
+.true.
+
+
+.true.
+
+
+
+.true.
+.true.
+.true.
+.true.
+
+.true.
+
+
+.true.
+
+
.true..true..true.
+.true..true.
@@ -732,6 +846,10 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+
+
hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
@@ -753,38 +871,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
-
-
-hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
-
-
-hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+hgrid=ne30np4.pg3 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
-
-
-
-hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false.
+hgrid=ne30np4.pg3 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nex=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1
@@ -796,6 +897,10 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
@@ -819,6 +924,150 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+
+hgrid=ne120np4.pg3 maxpft=79 mask=tx0.1v3 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+
+hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
+
+
+
+
hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
-hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
@@ -956,7 +1205,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false.
@@ -969,21 +1218,28 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.I1850Clm45BgcGs.0901-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
lnd/clm2/initdata_map/clmi.I1850Clm45BgcCruGs.1101-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
lnd/clm2/initdata_map/clmi.B1850Clm45BgcGs.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
+
+
+lnd/clm2/initdata_map/clmi.B1850Clm45BgcGs.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
@@ -991,7 +1247,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
@@ -1011,63 +1267,34 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCropCru-ciso.1526-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
-
lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
-lnd/clm2/initdata_map/clmi.I1850Clm50SpCru.1706-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
-
-
-
-lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
-
-lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
-
-
-
lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
-
lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
-
-lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
+ lnd_tuning_mode="clm5_0_CRUv7" use_init_interp=".true."
+>lnd/clm2/initdata_map/clmi.I1850Clm50SpCru.1706-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
-
-
-
+
+lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
+ lnd_tuning_mode="clm6_0_GSWP3v1" use_init_interp=".true."
+>lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm60BgcCrop-ciso.1361-01-01.ne30pg3_mg17_c240317.nc
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
+
+
+
+lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
+
+
+
+lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
+
+
+
+
+lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
+
+
-
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1150,7 +1596,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1158,7 +1604,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1166,15 +1612,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
-
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
@@ -1196,7 +1642,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1205,21 +1651,22 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1227,7 +1674,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1235,7 +1682,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1243,15 +1690,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
-
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
@@ -1259,7 +1706,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1268,21 +1715,22 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc
@@ -1290,7 +1738,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc
@@ -1298,7 +1746,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc
@@ -1306,15 +1754,15 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc
-
+
lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc
@@ -1322,7 +1770,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc
@@ -1331,7 +1779,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc
@@ -1395,6 +1843,8 @@ lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne16np4.pg3_hist_2000_78pfts_c240216.n
lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_0.125nldas2_hist_2000_78pfts_c240216.nc
+
+lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_T42_hist_2000_78pfts_c240425.nc
lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_360x720cru_hist_2000_78pfts_c240216.nc
@@ -1469,11 +1919,11 @@ lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne16np4.pg3_hist_1850_78pfts_c240216.n
lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne120np4.pg3_hist_1850_78pfts_c240216.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1979_78pfts_c240425.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1979_78pfts_c240425.nc
-lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_1850_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_1979_78pfts_c240425.nc
lnd/clm2/surfdata_esmf/NEON/landuse.timeseries_NEON_${NEONSITE}_hist_78pfts_simyr2018-2023_c230931.nc
-->
+
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nclnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240425.nc
+lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc
+
lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP3-7.0_1850-2100_78pfts_c240216.nc
@@ -1592,14 +2050,17 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
-
+
lnd/clm2/surfdata_map/fates-sci.1.68.3_api.31.0.0_tools.1.0.1/LUH2_states_transitions_management.timeseries_4x5_hist_simyr1850-2015_c231101.nc
+
+lnd/clm2/surfdata_map/fates-sci.1.77.0_api.36.0.0/fates_landuse_pft_map_4x5_240206.nc
+
lnd/clm2/surfdata_map/fates-sci.1.68.3_api.31.0.0_tools.1.0.1/LUH2_states_transitions_management.timeseries_4x5_hist_simyr1850-2015_c231101.nc
+ >lnd/clm2/surfdata_map/fates-sci.1.68.3_api.31.0.0_tools.1.0.1/LUH2_states_transitions_management.timeseries_4x5_hist_simyr0850-2015_c240216.nc
.false.
@@ -1638,6 +2099,7 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
.false..false..true.
+.true..true..false.
@@ -1906,12 +2368,18 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
18502106
-lnd/clm2/urbandata/CTSM52_tbuildmax_OlesonFeddema_2020_0.9x1.25_simyr1849-2106_c200605.nc
-/glade/work/xinchang/02_Explicit_AC_Adoption/02_data_present_day/CTSM52_urbantv_Li_2024_0.9x1.25_simyr1849-2106_c20230621.nc
+lnd/clm2/urbandata/CTSM52_tbuildmax_OlesonFeddema_2020_0.9x1.25_simyr1849-2106_c200605.nc
-/glade/work/xinchang/02_Explicit_AC_Adoption/02_data_present_day/CTSM52_urbantv_Li_2024_0.9x1.25_simyr1849-2106_c20230621.nc
+lnd/clm2/urbandata/CLM50_tbuildmax_Oleson_2016_0.9x1.25_simyr1849-2106_c160923.nc
+/glade/work/xinchang/02_Explicit_AC_Adoption/02_data_present_day/CTSM52_urbantv_Li_2024_0.9x1.25_simyr1849-2106_c20230621.nclnd/clm2/urbandata/CLM45_tbuildmax_Oleson_2016_0.9x1.25_simyr1849-2106_c160923.nc
@@ -1994,47 +2462,77 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c
Zender_2003atmbilinear
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2_cam5.4-forCLM_cdf5_c240202.nc
+lnd/clm2/dustemisdata/dst_source2x2_cam5.4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2_cam5.4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2_cam5.4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2tuned-cam4-forCLM_cdf5_c240202.nc
+lnd/clm2/dustemisdata/dst_source1x1tuned-cam4-forCLM_cdf5_c240202.nc
+lnd/clm2/dustemisdata/dst_source1x1tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source1x1tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source1x1tuned-cam4-forCLM_cdf5_c240202.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
-lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc
+lnd/clm2/dustemisdata/dst_source2x2tunedcam6-2x2-forCLM_cdf5_c230312.nc0
+no_harvest.false..false..false..false..false.
-.false..false..false.
-.false.
+.false.
+.false.
+.true.
+.true.
+.true.
+.true.
+.false.10
+.true.
+.true.
+.false.
+.true.
+.true.
+.false.2,2
-.true.
-.false.
-.true.
-.false.
+
diff --git a/bld/namelist_files/namelist_definition_ctsm.xml b/bld/namelist_files/namelist_definition_ctsm.xml
index 37c457141c..0912617872 100644
--- a/bld/namelist_files/namelist_definition_ctsm.xml
+++ b/bld/namelist_files/namelist_definition_ctsm.xml
@@ -734,10 +734,17 @@ Toggle to turn on no competition mode (only relevant if FATES is being used).
Toggle to turn on FATES satellite phenology mode (only relevant if FATES is being used).
-
-Toggle to turn on the logging module
-(Only relevant if FATES is on)
+
+Set FATES harvesting mode by setting fates_harvest_mode to a valid string option.
+Allowed values are:
+ no_harvest: no fates harvesting of any kind
+   event_code: fates harvest driven by fates logging event codes (see fates parameter file) only
+ landuse_timeseries: fates harvest driven by CLM flanduse_timeseries file (dynHarvestMod)**
+ luhdata_area: fates harvest driven by LUH2 raw harvest data, area-based (dynFATESLandUseChangeMod)
+ luhdata_mass: fates harvest driven by LUH2 raw harvest data, mass-based (dynFATESLandUseChangeMod)
+**Note that the landuse_timeseries option is not the same as the FATES fluh_timeseries data file.
+This option is older than the luhdata options and may be deprecated at some point in the future.
If TRUE, enable use of land use harmonization (LUH) state and transition data from luh_timeseries file.
+This is enabled by default if fates_harvest_mode is set to use the raw LUH2 harvest data
(Also, only valid for use_fates = true and is incompatible with transient runs currently.)
+
+If TRUE, enable use of FATES land use with no competition and fixed biogeography. This mode
+requires the use of the land use x pft association static data map file. See the
+flandusepftdat definition entry in this file for more information.
+(Only valid for use_fates = true and is incompatible with transient runs currently.)
+
+
+
+If TRUE, ignore the land-use state vector and transitions, and assert that all lands
+are primary, and that there is no harvest. This mode is only relevant for FATES
+spin-up workflows that are intending to use the spin-up restart output to start a
+spin-up workflows that intend to use the spin-up restart output to start a
+should be set to true for the spin-up case and false for the transient case.
+
+
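Taken together, the entries above describe a two-case workflow; a sketch of the per-phase switch settings, expressed as Perl data purely for illustration (not a tested configuration):

    use strict;
    use warnings;

    # Spin-up case writes the restart; the transient case restarts from it (illustrative only).
    my %phase = (
       spinup    => { use_fates_potentialveg => '.true.'  },
       transient => { use_fates_potentialveg => '.false.', use_fates_lupft => '.true.' },
    );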
@@ -806,6 +831,18 @@ types to vary over time.
(Only relevant if FATES is on).
+
+Full pathname of fates landuse x pft association static data map.
+The file associates land use types with pfts across a static global map.
+This file is necessary for running FATES with use_fates_luh,
+use_fates_nocomp, and use_fates_fixed_biogeog engaged (note that use_fates_lupft
+is provided as a namelist option to engage all necessary options). The file is output
+by the FATES land use data tool (https://github.com/NGEET/tools-fates-landusedata)
+which processes the raw land use data from the THEMIS tool data sets
+(https://doi.org/10.5065/29s7-7b41)
+
+
Toggle to turn on the LUNA model, to effect Photosynthesis by leaf Nitrogen
@@ -828,6 +865,11 @@ Toggle to turn on meteorological downscaling in hillslope model
Toggle to turn on surface water routing in the hillslope hydrology model
+
+If true, set fsat to zero for hillslope columns
+
+
Method for calculating hillslope saturated head gradient
@@ -1070,6 +1112,11 @@ Turn urban air conditioning/heating ON or OFF and add wasteheat:
ON_WASTEHEAT = Air conditioning/heating is ON and waste-heat sent to urban canyon
+
+If TRUE, use explicit, time-varying AC adoption rate for air-conditioning flux and interior building temperature calculations.
+
+
If TRUE, urban traffic flux will be activated (Currently NOT implemented).
@@ -1136,6 +1183,50 @@ e.g., because we have integrated AgSys and have tests of it that make
these software infrastructure tests obsolete.
+
+
+
+
+Turn on the Matrix solution for above ground biogeochemistry, requires CN to be on
+
+
+
+Turn on the Matrix solution for soil biogeochemistry
+
+
+
+Turn on extra output for the matrix solution
+
+
+
+Turn on semi-analytic spinup solution for the CN/Soil matrix, requires soil matrix to be on
+This will drive the solution to equilibrium
+
+
+
+Number of years to average the storage capacitance over for the soil matrix solution during semi-analytic spinup (spinup_matrixcn=T).
+Normally this should be the same as the number of years the atmospheric forcing is cycled over.
+
+
+
+Length of each semi-analytic solution interval. E.g., with nyr_SASU=5, analytic solutions will be calculated every five years.
+nyr_SASU=1 is the fastest SASU but inaccurate; nyr_SASU=nyr_forcing (e.g. 20) is the slowest SASU but the most accurate.
+
+
+
+The restart file will be based on the average of all analytic solutions within the iloop_avg'th loop.
+E.g., if nyr_forcing = 20 and iloop_avg = 8, the restart file in year 160 will be based on analytic solutions from years 141 to 160.
+The number of analytic solutions within one loop depends on the ratio between nyr_forcing and nyr_SASU.
+E.g., if nyr_forcing = 20 and nyr_SASU = 5, the number of analytic solutions is 20/5 = 4.
+
+
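The year bookkeeping in the iloop_avg description is easy to misread; a tiny worked check of the example quoted above (all numbers taken from the text):

    use strict;
    use warnings;

    my ($nyr_forcing, $nyr_sasu, $iloop_avg) = (20, 5, 8);
    my $restart_yr = $nyr_forcing * $iloop_avg;        # year 160
    my $first_yr   = $restart_yr - $nyr_forcing + 1;   # year 141
    printf "restart in yr %d averages solutions from yr %d to %d (%d per loop)\n",
           $restart_yr, $first_yr, $restart_yr, $nyr_forcing / $nyr_sasu;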
@@ -1364,11 +1455,6 @@ Percentage threshold above which the model keeps the urban landunits. Selecting
Default: 0
-
-Toggle to turn on the dynamic root model
-
-
Toggle to turn on on diagnostic Snow Radiative Effect
@@ -2035,10 +2121,10 @@ hist means do NOT use a future scenario, just use historical data.
Land mask description
-
+
+ valid_values="clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_cam7.0,clm4_5_cam6.0,clm4_5_cam5.0,clm4_5_cam4.0,clm5_0_cam7.0,clm5_0_cam6.0,clm5_0_cam5.0,clm5_0_cam4.0,clm5_0_CRUv7,clm5_0_GSWP3v1,clm5_1_GSWP3v1,clm5_1_CRUv7,clm5_1_cam7.0,clm5_1_cam6.0,clm5_1_cam5.0,clm5_1_cam4.0,clm6_0_GSWP3v1,clm6_0_cam7.0,clm6_0_cam6.0,clm6_0_cam5.0,clm6_0_cam4.0">
General configuration of model version and atmospheric forcing to tune the model to run under.
This sets the model to run with constants and initial conditions that were set to run well under
the configuration of model version and atmospheric forcing. To run well constants would need to be changed
@@ -2096,21 +2182,21 @@ How close in years to use when looking for an initial condition file (finidat) i
Simulation years you can look for in initial condition files (finidat) if interpolation is turned on (use_init_interp is .true.)
-
+
Command line argument for setting up your simulation in a mode for faster
throughput. By default turns off some options, and sets up for a lower level
of output. When bgc_mode is some level of prognostic BGC (so NOT Satellite Phenology)
-it also sets up for accelerated decomposition.
+it also sets up for accelerated decomposition. The "sasu" mode sets up
+for using the CN-matrix mode with Semi-Analytic Spin Up.
NOTE: THIS CORRESPONDS DIRECTLY TO THE env_run.xml VARIABLE OF THE SAME NAME.
Set the env_run variable, rather than setting this directly.
+ group="default_settings" valid_values="sp,bgc,fates" >
Command line argument for biogeochemistry mode for CLM4.5
  sp = Satellite Phenology
- cn = Carbon Nitrogen model
bgc = CLM4.5 BGC model with:
CENTURY model pools
Nitrification/De-nitrification
diff --git a/bld/namelist_files/use_cases/1850_control.xml b/bld/namelist_files/use_cases/1850_control.xml
index 94ee8c5d0d..6ea033629f 100644
--- a/bld/namelist_files/use_cases/1850_control.xml
+++ b/bld/namelist_files/use_cases/1850_control.xml
@@ -8,47 +8,18 @@
constant
-.false.
-.false.
-.false.
+.false.
-1850
-1850
+1850
+1850
-1850
-1850
+1850
+1850
-1850
-1850
+1850
+1850
-1850
-1850
-
-1850
-1850
-
-1850
-1850
-
-1850
-1850
-
-1850
-1850
-
-1850
-1850
-
-lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc
-
-lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc
-
-lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc
-
-cycle
-cycle
diff --git a/bld/namelist_files/use_cases/1850_noanthro_control.xml b/bld/namelist_files/use_cases/1850_noanthro_control.xml
index 636164a729..d84903f43c 100644
--- a/bld/namelist_files/use_cases/1850_noanthro_control.xml
+++ b/bld/namelist_files/use_cases/1850_noanthro_control.xml
@@ -10,26 +10,11 @@
.false.
-1850
-1850
+1850
+1850
-1850
-1850
-
-1850
-1850
-
-cycle
-cycle
-
-1925
-1925
-
-1925
-1925
-
-1925
-1925
+1925
+1925none
nn
-1850
-1850
-
-1850
-1850
-
-1850
-1850
+1850
+1850NONE
diff --git a/bld/namelist_files/use_cases/2000_control.xml b/bld/namelist_files/use_cases/2000_control.xml
index f3c4980fc8..2fce7c5cce 100644
--- a/bld/namelist_files/use_cases/2000_control.xml
+++ b/bld/namelist_files/use_cases/2000_control.xml
@@ -8,37 +8,17 @@
constant
-.true.
-.false.
-.true.
-.false.
-.false.
+.true.
+.false.
+.false.
-2000
-2000
+2000
+2000
-2000
-2000
+2000
+2000
-2000
-2000
-
-2000
-2000
-
-2000
-2000
-
-2000
-2000
-
-2000
-2000
-
-2000
-2000
-
-2000
-2000
+2000
+2000
diff --git a/bld/namelist_files/use_cases/2010_control.xml b/bld/namelist_files/use_cases/2010_control.xml
index 9316ecfb7f..2f72624077 100644
--- a/bld/namelist_files/use_cases/2010_control.xml
+++ b/bld/namelist_files/use_cases/2010_control.xml
@@ -8,44 +8,17 @@
constant
-.true.
-.true.
-.false.
-.true.
-.false.
-.false.
+.true.
+.false.
+.false.
-2010
-2010
+2010
+2010
-2010
-2010
+2010
+2010
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
-
-2010
-2010
+2010
+2010
diff --git a/bld/namelist_files/use_cases/20thC_transient.xml b/bld/namelist_files/use_cases/20thC_transient.xml
index d6dd729b35..6cbf9e0d38 100644
--- a/bld/namelist_files/use_cases/20thC_transient.xml
+++ b/bld/namelist_files/use_cases/20thC_transient.xml
@@ -18,46 +18,20 @@
flanduse_timeseries
-.true.
-.false.
-.true.
-.false.
-.false.
-
-1850
-2015
-1850
-
-1850
-2015
-1850
-
-1850
-2015
-1850
-
-1850
-2016
-1850
-
-1850
-2016
-1850
-
-1850
-2016
-1850
-
-1850
-2106
-1850
-
-1850
-2106
-1850
-
-1850
-2106
-1850
+.true.
+.false.
+.false.
+
+1850
+2015
+1850
+
+1850
+2016
+1850
+
+1850
+2106
+1850
diff --git a/bld/namelist_files/use_cases/stdurbpt_pd.xml b/bld/namelist_files/use_cases/stdurbpt_pd.xml
index 65786f32ae..6f5e754ba0 100644
--- a/bld/namelist_files/use_cases/stdurbpt_pd.xml
+++ b/bld/namelist_files/use_cases/stdurbpt_pd.xml
@@ -18,10 +18,8 @@
'OFF'
-.true.
-.false.
-.true.
-.false.
-.false.
+.true.
+.false.
+.false.
diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl
index c8875090cd..d02d68b06b 100755
--- a/bld/unit_testers/build-namelist_test.pl
+++ b/bld/unit_testers/build-namelist_test.pl
@@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile {
#
# Figure out number of tests that will run
#
-my $ntests = 2511;
+my $ntests = 3314;
if ( defined($opts{'compare'}) ) {
- $ntests += 1545;
+ $ntests += 2052;
}
plan( tests=>$ntests );
@@ -314,7 +314,7 @@ sub cat_and_create_namelistinfile {
print "=================================================================================\n";
my $startfile = "clmrun.clm2.r.1964-05-27-00000.nc";
-foreach my $driver ( "mct", "nuopc" ) {
+foreach my $driver ( "nuopc" ) {
print " For $driver driver\n\n";
# configuration, structure, irrigate, verbose, clm_demand, ssp_rcp, test, sim_year, use_case
foreach my $options ( "-res 0.9x1.25 -configuration nwp",
@@ -326,6 +326,11 @@ sub cat_and_create_namelistinfile {
"-res 0.9x1.25 -use_case 1850_control",
"-res 1x1pt_US-UMB -clm_usr_name 1x1pt_US-UMB -namelist '&a fsurdat=\"/dev/null\"/'",
"-res 1x1_brazil",
+ "-namelist '&a use_matrixcn=F,use_soil_matrixcn=F,hist_wrt_matrixcn_diag=F,spinup_matrixcn=F/' -bgc sp",
+ "-namelist '&a use_matrixcn=T,use_soil_matrixcn=T,hist_wrt_matrixcn_diag=T,spinup_matrixcn=T/' -bgc bgc -crop -clm_accelerated_spinup on",
+ "-namelist \"&a soil_decomp_method='MIMICSWieder2015',use_matrixcn=F/\" -bgc bgc -crop",
+ "-namelist \"&a soil_decomp_method='MIMICSWieder2015',use_matrixcn=T/\" -bgc bgc -crop",
+ "-bgc bgc -crop -clm_accelerated_spinup sasu",
"-res 0.9x1.25 -clm_start_type startup", "-namelist '&a irrigate=.false./' -crop -bgc bgc",
"-res 0.9x1.25 -infile myuser_nl_clm",
"-res 0.9x1.25 -ignore_ic_date -clm_start_type branch -namelist '&a nrevsn=\"thing.nc\"/' -bgc bgc -crop",
@@ -335,13 +340,7 @@ sub cat_and_create_namelistinfile {
my $file = $startfile;
&make_env_run();
my $base_options = "-envxml_dir . -driver $driver";
- if ( $driver eq "mct" ) {
- $base_options = "$base_options -lnd_frac $DOMFILE";
- # Skip the MCT test for excess ice streams
- if ( $options =~ /use_excess_ice_streams=.true./ ) {
- next;
- }
- } else {
+ if ( $driver eq "nuopc" ) {
$base_options = "$base_options -namelist '&a force_send_to_atm = .false./'";
}
eval{ system( "$bldnml $base_options $options > $tempfile 2>&1 " ); };
@@ -420,17 +419,17 @@ sub cat_and_create_namelistinfile {
$mode = "-phys $phys";
&make_config_cache($phys);
foreach my $options (
- "-res ne0np4.ARCTIC.ne30x4 -bgc sp -use_case 2000_control -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res ne0np4.ARCTICGRIS.ne30x8 -bgc sp -use_case 1850_control -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res 1.9x2.5 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res 0.9x1.25 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res 0.9x1.25 -bgc bgc -crop -use_case 20thC_transient -namelist '&a start_ymd=19500101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res ne0np4CONUS.ne30x8 -bgc sp -use_case 2000_control -namelist '&a start_ymd=20130101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res 1.9x2.5 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=20030101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res 1.9x2.5 -bgc sp -use_case 2010_control -namelist '&a start_ymd=20100101/' -lnd_tuning_mode ${phys}_cam6.0",
+ "-res ne0np4.ARCTIC.ne30x4 -bgc sp -use_case 2000_control -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res ne0np4.ARCTICGRIS.ne30x8 -bgc sp -use_case 1850_control -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res 1.9x2.5 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res 0.9x1.25 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=19790101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res 0.9x1.25 -bgc bgc -crop -use_case 20thC_transient -namelist '&a start_ymd=19500101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res ne0np4CONUS.ne30x8 -bgc sp -use_case 2000_control -namelist '&a start_ymd=20130101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res 1.9x2.5 -bgc sp -use_case 20thC_transient -namelist '&a start_ymd=20030101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res 1.9x2.5 -bgc sp -use_case 2010_control -namelist '&a start_ymd=20100101/' -lnd_tuning_mode ${phys}_cam7.0",
"-res 1x1_brazil -no-megan -use_case 2000_control -lnd_tuning_mode ${phys}_CRUv7",
- "-res C96 -bgc sp -use_case 2010_control -namelist '&a start_ymd=20100101/' -lnd_tuning_mode ${phys}_cam6.0",
- "-res ne0np4.ARCTIC.ne30x4 -bgc sp -use_case 2000_control -namelist '&a start_ymd=20130101/' -lnd_tuning_mode ${phys}_cam6.0",
+ "-res C96 -bgc sp -use_case 2010_control -namelist '&a start_ymd=20100101/' -lnd_tuning_mode ${phys}_cam7.0",
+ "-res ne0np4.ARCTIC.ne30x4 -bgc sp -use_case 2000_control -namelist '&a start_ymd=20130101/' -lnd_tuning_mode ${phys}_cam7.0",
) {
&make_env_run();
eval{ system( "$bldnml -envxml_dir . $options > $tempfile 2>&1 " ); };
@@ -548,11 +547,6 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm5_0",
},
- "exice stream on, but mct" =>{ options=>"--res 0.9x1.25 --envxml_dir . --driver mct --lnd_frac $DOMFILE ",
- namelst=>"use_excess_ice=.true., use_excess_ice_streams=.true.",
- GLC_TWO_WAY_COUPLING=>"FALSE",
- phys=>"clm5_0",
- },
"clm50CNDVwtransient" =>{ options=>" -envxml_dir . -use_case 20thC_transient -dynamic_vegetation -res 10x15 -ignore_warnings",
namelst=>"",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -898,16 +892,6 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm5_0",
},
- "both lnd_frac and on nml" =>{ options=>"-driver mct -lnd_frac $DOMFILE -envxml_dir .",
- namelst=>"fatmlndfrc='frac.nc'",
- GLC_TWO_WAY_COUPLING=>"FALSE",
- phys=>"clm5_0",
- },
- "lnd_frac set to UNSET" =>{ options=>"-driver mct -lnd_frac UNSET -envxml_dir .",
- namelst=>"",
- GLC_TWO_WAY_COUPLING=>"FALSE",
- phys=>"clm6_0",
- },
"lnd_frac set but nuopc" =>{ options=>"-driver nuopc -lnd_frac $DOMFILE -envxml_dir .",
namelst=>"",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -923,11 +907,6 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm6_0",
},
- "force_send but not nuopc" =>{ options=>"-driver mct -lnd_frac $DOMFILE -envxml_dir .",
- namelst=>"force_send_to_atm = .false.",
- GLC_TWO_WAY_COUPLING=>"FALSE",
- phys=>"clm6_0",
- },
"branch but NO nrevsn" =>{ options=>"-clm_start_type branch -envxml_dir .",
namelst=>"",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -948,6 +927,76 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"TRUE",
phys=>"clm4_5",
},
+ "matrixWOBGC" =>{ options=>"-envxml_dir . -bgc sp",
+ namelst=>"use_matrixcn=.true.",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "soilmatrixWOBGC" =>{ options=>"-envxml_dir . -bgc sp",
+ namelst=>"use_soil_matrixcn=T",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "soilmatrixWmimics" =>{ options=>"-envxml_dir . -bgc bgc",
+ namelst=>"use_soil_matrixcn=T,soil_decomp_method='MIMICSWieder2015'",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "matrixcn_diagWOmatrix" =>{ options=>"-envxml_dir . -bgc bgc",
+ namelst=>"use_soil_matrixcn=.false.,use_matrixcn=F,hist_wrt_matrixcn_diag=T",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "spinupWOsoilmatrix" =>{ options=>"-envxml_dir . -bgc bgc",
+ namelst=>"use_soil_matrixcn=F,use_matrixcn=T,spinup_matrixcn=T",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "sasuspinupWOsoilmatx" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup sasu",
+ namelst=>"use_soil_matrixcn=.false.,use_matrixcn=.false.",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_1",
+ },
+ "sasuspinupWOCN" =>{ options=>"-envxml_dir . -bgc sp -clm_accelerated_spinup sasu",
+ namelst=>"",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_1",
+ },
+ "nyrforceWOspinup" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup sasu",
+ namelst=>"use_matrixcn=.false.,spinup_matrixcn=F,nyr_forcing=20",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "nyrsasuGTnyrforce" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup sasu",
+ namelst=>"use_matrixcn=.false.,spinup_matrixcn=T,nyr_forcing=20,nyr_sasu=21",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "iloopZero" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup sasu",
+ namelst=>"use_matrixcn=.false.,spinup_matrixcn=T,iloop_avg=0",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "matrixspinupWADmode" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup sasu",
+ namelst=>"spinup_matrixcn=T,spinup_state=2",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "matrixspinupWclmaccell" =>{ options=>"-envxml_dir . -bgc bgc -clm_accelerated_spinup off",
+ namelst=>"use_soil_matrixcn=T,spinup_matrixcn=T",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "fatesWuse_cnmatrix" =>{ options=>"-envxml_dir . -bgc fates",
+ namelst=>"use_matrixcn=.true.",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
+ "fatesWuse_soilcnmatrix" =>{ options=>"-envxml_dir . -bgc fates",
+ namelst=>"use_soil_matrixcn=.true.",
+ GLC_TWO_WAY_COUPLING=>"TRUE",
+ phys=>"clm5_0",
+ },
"useFATESContradict" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
namelst=>"use_fates=.false.",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -1023,8 +1072,18 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm4_5",
},
+ "usespitfireusefatessp" =>{ options=>"-envxml_dir . --bgc fates",
+ namelst=>"fates_spitfire_mode=1,use_fates_sp=.true.",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "usefatesspusefateshydro" =>{ options=>"-envxml_dir . --bgc fates",
+ namelst=>"use_fates_sp=.true.,use_fates_planthydro=.true.",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
"useloggingButNOTFATES" =>{ options=>"-envxml_dir . -no-megan",
- namelst=>"use_fates_logging=.true.",
+ namelst=>"fates_harvest_mode='event_code'",
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm4_5",
},
@@ -1038,11 +1097,16 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm4_5",
},
- "useinventorybutnotfile" =>{ options=>"--res 0.9x1.25 --bgc fates --envxml_dir . --no-megan",
+ "useFATESLUH2butnotfile" =>{ options=>"--res 0.9x1.25 --bgc fates --envxml_dir . --no-megan",
namelst=>"use_fates_luh=.true.",
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm4_5",
},
+ "useFATESLUPFTbutnotfile" =>{ options=>"--res 0.9x1.25 --bgc fates --envxml_dir . --no-megan",
+ namelst=>"use_fates_lupft=.true.",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm4_5",
+ },
"inventoryfileDNE" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
namelst=>"use_fates_luh=.true., fluh_timeseries='zztop'",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -1068,6 +1132,36 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm5_0",
},
+ "useFATESSPwithLUH" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_sp=T,use_fates_luh=T",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "useFATESPOTVEGwithHARVEST" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_potentialveg=T,fates_harvest_mode='event_code',use_fates_luh=T",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "useFATESHARVEST3WOLUH" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_luh=F,fates_harvest_mode='luhdata_area'",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "useFATESLUPFTWOLUH" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_lupft=T,use_fates_luh=F",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "useFATESLUPFTWONOCOMP" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_lupft=T,use_fates_nocomp=F",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "useFATESLUPFTWOFBG" =>{ options=>"-bgc fates -envxml_dir . -no-megan",
+ namelst=>"use_fates_lupft=T,use_fates_fixedbiogeog=F",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
"useFATESTRANSWdynPFT" =>{ options=>"-bgc fates -envxml_dir . -use_case 20thC_transient -no-megan",
namelst=>"do_transient_pfts=T",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -1078,11 +1172,6 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm5_0",
},
- "useHYDSTwithdynroot" =>{ options=>"-bgc bgc -envxml_dir . -megan",
- namelst=>"use_hydrstress=.true., use_dynroot=.true.",
- GLC_TWO_WAY_COUPLING=>"FALSE",
- phys=>"clm5_0",
- },
"specWOfireemis" =>{ options=>"-envxml_dir . -no-fire_emis",
namelst=>"fire_emis_specifier='bc_a1 = BC'",
GLC_TWO_WAY_COUPLING=>"FALSE",
@@ -1322,7 +1411,13 @@ sub cat_and_create_namelistinfile {
&make_config_cache($warntest{$key}{"phys"});
my $options = $warntest{$key}{"options"};
my $namelist = $warntest{$key}{"namelst"};
- &make_env_run( GLC_TWO_WAY_COUPLING=>$warntest{$key}{"GLC_TWO_WAY_COUPLING"} );
+ my %settings;
+ foreach my $xmlvar ( "GLC_TWO_WAY_COUPLING" ) {
+       if ( defined($warntest{$key}{$xmlvar}) ) {
+          $settings{$xmlvar} = $warntest{$key}{$xmlvar};
+ }
+ }
+ &make_env_run( %settings );
eval{ system( "$bldnml $options -namelist \"&clmexp $namelist /\" > $tempfile 2>&1 " ); };
isnt( $?, 0, $key );
system( "cat $tempfile" );
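The %settings forwarding in the warning loop can also be written as a grep/map pipeline; an equivalent self-contained sketch (the hash contents here are made up):

    use strict;
    use warnings;

    my %warntest = ( somekey => { GLC_TWO_WAY_COUPLING => "FALSE" } );
    my $key = "somekey";

    # Same effect as the explicit foreach/if above: copy only defined settings.
    my %settings = map  { $_ => $warntest{$key}{$_} }
                   grep { defined $warntest{$key}{$_} }
                   ("GLC_TWO_WAY_COUPLING");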
@@ -1411,30 +1506,166 @@ sub cat_and_create_namelistinfile {
}
print "\n==================================================\n";
-print " Rest all use-cases \n";
+print " Test all use-cases over all physics options\n";
print "==================================================\n";
-# Run over all use-cases...
+# Run over all use-cases for f09 and all physics...
my $list = `$bldnml -use_case list 2>&1 | grep "use case"`;
my @usecases;
if ( $list =~ /build-namelist : use cases : (.+)$/ ) {
- my @usecases = split( / /, $list );
+ @usecases = split( / /, $1 );
} else {
die "ERROR:: Trouble getting list of use-cases\n";
}
-foreach my $usecase ( @usecases ) {
- $options = "-use_case $usecase -envxml_dir .";
+if ( $#usecases != 15 ) {
+ print "use-cases = @usecases\n";
+ die "ERROR:: Number of use-cases isn't what's expected\n";
+}
+my @expect_fails = ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4_transient", "2018-PD_transient", "1850-2100_SSP1-1.9_transient",
+ "1850-2100_SSP4-6.0_transient", "2018_control" );
+foreach my $phys ( "clm4_5", "clm5_0", "clm5_1", "clm6_0" ) {
+ print "physics = $phys\n";
+ &make_config_cache($phys);
+ foreach my $usecase ( @usecases ) {
+ print "usecase = $usecase\n";
+ $options = "-res 0.9x1.25 -use_case $usecase -envxml_dir .";
+ &make_env_run();
+ my $expect_fail = undef;
+ foreach my $failusecase ( @expect_fails ) {
+ if ( $failusecase eq $usecase ) {
+ $expect_fail = 1;
+ last;
+ }
+ }
+ eval{ system( "$bldnml $options > $tempfile 2>&1 " ); };
+ if ( ! defined($expect_fail) ) {
+ is( $@, '', "options: $options" );
+ $cfiles->checkfilesexist( "$options", $mode );
+ $cfiles->shownmldiff( "default", "standard" );
+ if ( defined($opts{'compare'}) ) {
+ $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode );
+ $cfiles->comparefiles( "$options", $mode, $opts{'compare'} );
+ }
+ if ( defined($opts{'generate'}) ) {
+ $cfiles->copyfiles( "$options", $mode );
+ }
+ } else {
+ isnt( $@, 0, "options: $options" );
+ }
+ &cleanup();
+ }
+}
+
+print "\n=======================================================================================\n";
+print " Test the seperate initial condition files, for ones not tested elsewhere\n";
+print "=========================================================================================\n";
+
+my %finidat_files = (
+ "f091850Clm45BgcGSW" =>{ phys =>"clm4_5",
+ atm_forc=>"GSWP3v1",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--no-crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=T",
+ },
+ "f091850Clm45BgcCRU" =>{ phys =>"clm4_5",
+ atm_forc=>"CRUv7",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--no-crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=T",
+ },
+ "f091850Clm45BgcCAM6" =>{ phys =>"clm4_5",
+ atm_forc=>"cam6.0",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=F",
+ },
+ "f091850Clm50BgcGSW" =>{ phys =>"clm5_0",
+ atm_forc=>"GSWP3v1",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=F",
+ },
+ "f091850Clm50SpGSW" =>{ phys =>"clm5_0",
+ atm_forc=>"GSWP3v1",
+ res => "0.9x1.25",
+ bgc => "sp",
+ crop => "--no-crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=T",
+ },
+ "f091850Clm50BgcCRU" =>{ phys =>"clm5_0",
+ atm_forc=>"CRUv7",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=F",
+ },
+ "f091850Clm50SpCRU" =>{ phys =>"clm5_0",
+ atm_forc=>"CRUv7",
+ res => "0.9x1.25",
+ bgc => "sp",
+ crop => "--no-crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=T",
+ },
+ "f091850Clm50BgcCAM6" =>{ phys =>"clm5_0",
+ atm_forc=>"cam6.0",
+ res => "0.9x1.25",
+ bgc => "bgc",
+ crop => "--crop",
+ use_case => "1850_control",
+ start_ymd => "18500101",
+ namelist => "irrigate=F",
+ },
+ );
+
+foreach my $key ( keys(%finidat_files) ) {
+ print( "$key\n" );
+ my $phys = $finidat_files{$key}{'phys'};
+ print "physics = $phys\n";
+ &make_config_cache($phys);
+ my $usecase = $finidat_files{$key}{'use_case'};
+ my $bgc = $finidat_files{$key}{'bgc'};
+ my $res = $finidat_files{$key}{'res'};
+ my $crop = $finidat_files{$key}{'crop'};
+ my $namelist = $finidat_files{$key}{'namelist'};
+ my $start_ymd = $finidat_files{$key}{'start_ymd'};
+ my $lnd_tuning_mode = "${phys}_" . $finidat_files{$key}{'atm_forc'};
+ $options = "-bgc $bgc -res $res -use_case $usecase -envxml_dir . $crop --lnd_tuning_mode $lnd_tuning_mode " .
+ "-namelist '&a start_ymd=$start_ymd, $namelist/'";
&make_env_run();
eval{ system( "$bldnml $options > $tempfile 2>&1 " ); };
is( $@, '', "options: $options" );
+ my $finidat = `grep finidat lnd_in`;
+ if ( $finidat =~ /initdata_map/ ) {
+ my $result;
+ eval { $result = `grep use_init_interp lnd_in`; };
+ is ( $result =~ /\.true\./, 1, "use_init_interp needs to be true here: $result");
+ }
$cfiles->checkfilesexist( "$options", $mode );
$cfiles->shownmldiff( "default", "standard" );
if ( defined($opts{'compare'}) ) {
- $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode );
- $cfiles->comparefiles( "$options", $mode, $opts{'compare'} );
+ $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode );
+ $cfiles->comparefiles( "$options", $mode, $opts{'compare'} );
}
if ( defined($opts{'generate'}) ) {
- $cfiles->copyfiles( "$options", $mode );
+ $cfiles->copyfiles( "$options", $mode );
}
&cleanup();
}
@@ -1501,7 +1732,7 @@ sub cat_and_create_namelistinfile {
"20thC_transient",
);
foreach my $res ( @glc_res ) {
- foreach my $usecase ( @usecases ) {
+ foreach my $usecase ( @use_cases ) {
my $startymd = undef;
if ( ($usecase eq "1850_control") || ($usecase eq "20thC_transient") ) {
$startymd = 18500101;
@@ -1573,43 +1804,6 @@ sub cat_and_create_namelistinfile {
#
# End loop over versions
#
-#
-# Test ALL SSP's for f09...
-#
-$phys = "clm6_0";
-$mode = "-phys $phys";
-&make_config_cache($phys);
-my $res = "0.9x1.25";
-foreach my $usecase ( "1850-2100_SSP5-8.5_transient", "1850-2100_SSP2-4.5_transient", "1850-2100_SSP1-2.6_transient", "1850-2100_SSP3-7.0_transient" ) {
- $options = "-res $res -bgc bgc -crop -use_case $usecase -envxml_dir . -namelist '&a start_ymd=20150101/'";
- &make_env_run();
- eval{ system( "$bldnml $options > $tempfile 2>&1 " ); };
- is( $@, '', "$options" );
- $cfiles->checkfilesexist( "$options", $mode );
- $cfiles->shownmldiff( "default", "standard" );
- if ( defined($opts{'compare'}) ) {
- $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode );
- $cfiles->comparefiles( "$options", $mode, $opts{'compare'} );
- }
- if ( defined($opts{'generate'}) ) {
- $cfiles->copyfiles( "$options", $mode );
- }
- &cleanup();
-}
-
-# The SSP's that fail because of missing ndep files...
-$phys = "clm5_0";
-$mode = "-phys $phys";
-&make_config_cache($phys);
-my $res = "0.9x1.25";
-foreach my $usecase ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4", "1850-2100_SSP1-1.9_transient",
- "1850-2100_SSP4-6.0_transient" ) {
- $options = "-res $res -bgc bgc -crop -use_case $usecase -envxml_dir . -namelist '&a start_ymd=20150101/'";
- &make_env_run();
- eval{ system( "$bldnml $options > $tempfile 2>&1 " ); };
- isnt( $?, 0, $usecase );
- system( "cat $tempfile" );
-}
print "\n==================================================\n";
print "Test clm4.5/clm5.0/clm5_1/clm6_0 resolutions \n";
@@ -1719,7 +1913,7 @@ sub cat_and_create_namelistinfile {
my $mode = "-phys $phys";
&make_config_cache($phys);
my @forclist = ();
- @forclist = ( "CRUv7", "GSWP3v1", "cam6.0", "cam5.0", "cam4.0" );
+ @forclist = ( "CRUv7", "GSWP3v1", "cam7.0", "cam6.0", "cam5.0", "cam4.0" );
foreach my $forc ( @forclist ) {
foreach my $bgc ( "sp", "bgc" ) {
my $lndtuningmode = "${phys}_${forc}";
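The finidat checks added above grep the generated lnd_in and require use_init_interp to be true whenever finidat points at an interpolated ("initdata_map") file. A minimal Python sketch of that check, for illustration only (the tester itself is Perl, and the lnd_in path here is assumed):

    import re

    def check_init_interp(lnd_in_path="lnd_in"):
        # If finidat names an interpolated initial-conditions file (its name
        # contains 'initdata_map'), use_init_interp must be set to .true.
        with open(lnd_in_path) as f:
            text = f.read()
        finidat = re.search(r"finidat\s*=\s*'([^']*)'", text)
        if finidat and "initdata_map" in finidat.group(1):
            assert re.search(r"use_init_interp\s*=\s*\.true\.", text), \
                "use_init_interp needs to be true here"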
diff --git a/bld/unit_testers/cmp_baseline_lnd_in_files b/bld/unit_testers/cmp_baseline_lnd_in_files
new file mode 100755
index 0000000000..3c6d84954c
--- /dev/null
+++ b/bld/unit_testers/cmp_baseline_lnd_in_files
@@ -0,0 +1,71 @@
+#!/bin/bash
+#
+# A simple script to compare lnd_in namelists between two baselines on Derecho
+#
+
+#----------------------------------------------------------------------
+# Usage subroutine
+usage() {
+ echo ""
+ echo "***********************************************************************"
+ echo "usage:"
+ echo "./cmp_baseline_lnd_in_files "
+ echo " "
+ echo "Compares lnd_in files between two baselines on Derecho"
+ echo "***********************************************************************"
+}
+
+#----------------------------------------------------------------------
+
+if [ "$#" -ne 2 ]; then
+ echo "Need to give two baseline directories to compare"
+ usage
+ exit 1
+fi
+baseline=$1
+compare=$2
+
+cwd=`pwd`
+if [ -z "$1" ]; then
+ echo "Need to enter a baseline directory tag name"
+ usage
+ exit 1
+fi
+if [ -z "$2" ]; then
+ echo "Need to enter a comparison directory tag name"
+ usage
+ exit 1
+fi
+
+BASELINE_ROOT=/glade/campaign/cgd/tss/ctsm_baselines
+root=$BASELINE_ROOT/$baseline
+if ! test -d "$root"; then
+ echo "Root directory of $root does NOT exist"
+ exit 1
+fi
+comp_root=$BASELINE_ROOT/$compare
+if ! test -d "$comp_root"; then
+ echo "Root comparison directory of $comp_root does NOT exist"
+ usage
+ exit 1
+fi
+cd $root
+filepat="*"
+dirnames=($filepat)
+if [ "${filenames[*]}" = "$filepat" ]; then
+ echo "No directoctories exist in this directory"
+ exit 1
+fi
+for dir in ${dirnames[*]}
+do
+ echo $dir
+ base=$dir/CaseDocs/lnd_in
+ comp=$BASELINE_ROOT/$compare/$dir/CaseDocs/lnd_in
+ if ! test -f "$base"; then
+ echo "$base, does NOT exist, skipping"
+ elif ! test -f "$comp"; then
+ echo "$comp, does NOT exist, skipping"
+ else
+ $cwd/../../cime/CIME/Tools/compare_namelists $base $comp
+ fi
+done
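A typical invocation of the script above, assuming two baseline tags already exist under /glade/campaign/cgd/tss/ctsm_baselines (the tag names here are illustrative):

    ./cmp_baseline_lnd_in_files ctsm5.2.005 ctsm5.2.006

It walks every case directory in the first baseline and runs CIME's compare_namelists on each pair of CaseDocs/lnd_in files, skipping cases missing on either side.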
diff --git a/bld/unit_testers/compare_namelists b/bld/unit_testers/compare_namelists
new file mode 100755
index 0000000000..0d0168b316
--- /dev/null
+++ b/bld/unit_testers/compare_namelists
@@ -0,0 +1,115 @@
+#! /bin/bash
+# Compare namelists made by the unit-tester to either themselves (for different physics versions)
+# or to a baseline also made by the unit-tester
+#
+
+#----------------------------------------------------------------------
+# Usage subroutine
+usage() {
+ echo ""
+ echo "***********************************************************************"
+ echo "usage:"
+ echo "./compare_namelists "
+ echo ""
+ echo "valid options: "
+ echo "[-h|--help] "
+ echo " Displays this help message"
+ echo "[-v|--verbose] "
+ echo " Run in verbose mode"
+ echo "[-pa|--physicsA ] "
+ echo " Namelists of first physics version for comparison"
+ echo " (in baseline directory)"
+ echo "[-pb|--physicsB ] "
+ echo " Namelists of second physics version to compare to the first one"
+ echo " (in current directory)"
+ echo "[-b|--baseline ] "
+ echo " Baseline directory to compare to (default current directory)"
+ echo " "
+ echo "NOTE: When --physicsA or --physicsB is chosen you must also set the other"
+ echo "***********************************************************************"
+}
+
+
+# Current working directory: the location of this script
+cwd=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+# Default settings
+verbose="No"
+PHYSA="all"
+PHYSB="all"
+baseline="."
+
+# Parse command-line options
+while [ $# -gt 0 ]; do
+ case $1 in
+ -h|--help )
+ usage
+ exit 0
+ ;;
+ -v|--verbose )
+ verbose="YES"
+ ;;
+ -pa|--physicsA )
+ PHYSA=$2
+ shift
+ ;;
+ -pb|--physicsB )
+ PHYSB=$2
+ shift
+ ;;
+ -b|--baseline )
+ baseline=$2
+ shift
+ ;;
+ * )
+ echo "ERROR:: invalid argument sent in: $2"
+ usage
+ exit 1
+ ;;
+ esac
+ shift
+done
+if [ "$PHYSA" = "all" ] && [ "$PHYSB" != "all" ]; then
+ echo "When setting --physicsB you MUST also set --physicsA"
+ echo "$PHYSA $PHYSB"
+ exit 1
+fi
+if [ "$PHYSB" = "all" ] && [ "$PHYSA" != "all" ]; then
+ echo "When setting --physicsA you MUST also set --physicsB"
+ exit 1
+fi
+
+if [ "$verbose" = "YES" ]; then echo "Get list of lnd_in.* files"; fi
+if [ "$verbose" = "YES" ]; then pwd; fi
+filepat="lnd_in.*"
+filenames=($filepat)
+if [ "$verbose" = "YES" ]; then echo "filename list = ${filenames[*]}"; fi
+if [ "${filenames[*]}" = "$filepat" ]; then
+ echo "No lnd_in files exist in the current directory -- run ./build-namelist_test.pl first"
+ exit 1
+fi
+for file in ${filenames[*]}
+do
+ fileA="$baseline/$file"
+ fileB="./$file"
+ # If physicsA option used and the file matches the physics input
+ if [[ "$fileA" =~ "-phys+$PHYSA" ]] || [ "$PHYSA" = "all" ]; then
+ # Replace physicsB for fileB
+ if [ ! "$PHYSA" = "all" ]; then
+ fileB=${fileB/-phys+$PHYSA/-phys+$PHYSB}
+ fi
+ if ! test -f $fileB; then
+ if [ "$verbose" = "YES" ]; then echo "$fileB does NOT exist -- skipping"; fi
+ else
+ if [ "$verbose" = "YES" ]; then echo "Compare namelists for $file"; fi
+ if [ "$fileA" = "$fileB" ]; then
+ echo "Comparing files to themself:"
+ echo "fileA = $fileA"
+ echo "fileB = $fileB"
+ exit 1
+ fi
+ $cwd/../../cime/CIME/Tools/compare_namelists $fileA $fileB
+ fi
+ fi
+done
+if [ "$verbose" = "YES" ]; then echo "Successfully ran through lnd_in files to compare"; fi
diff --git a/bld/unit_testers/xFail/expectedClmTestFails.xml b/bld/unit_testers/xFail/expectedClmTestFails.xml
index c7cbfee488..380e614ea1 100644
--- a/bld/unit_testers/xFail/expectedClmTestFails.xml
+++ b/bld/unit_testers/xFail/expectedClmTestFails.xml
@@ -20,20 +20,6 @@
-
-
-
-
-
-
-
- goldbach not recognized
- goldbach not recognized
- goldbach not recognized
-
-
-
-
diff --git a/bld/unit_testers/xFail/expectedFail.pm b/bld/unit_testers/xFail/expectedFail.pm
index 9feaa3e38b..067e3a51d8 100755
--- a/bld/unit_testers/xFail/expectedFail.pm
+++ b/bld/unit_testers/xFail/expectedFail.pm
@@ -5,7 +5,7 @@ Documentation for expectedFail.pm
=head1 Overview
The module expectedFail.pm supplies the capability of checking if a failed test is expected to fail.
-It is called directly from either test_driver.sh (for batch and interactive tests) or build-namelist_test.pl.
+It is called directly from build-namelist_test.pl.
Future plans involve integrating this module into cesm tests.
=head1 Use Case
@@ -112,7 +112,7 @@ sub new {
=head1 parseOutput
-parseOutput parsese the output from the build-namelist_test.pl script. It is similar
+parseOutput parses the output from the build-namelist_test.pl script. It is similar
to, but not interchangable with parseOutputCLM.
The only argument is that of the reference variable that contains the information dumped
@@ -223,119 +223,6 @@ sub parseOutput
#
##############################################################################
-=head1 parseOutputCLM
-
-parseOutputCLM parsese the output from the test_driver.sh script. It is similar
-to, but not interchangable with parseOutput.
-
-parseOutputCLM takes one arguments:
- $statFoo-> the name of the td..status file
-
-returns: nothing
-
-=cut
-
-##############################################################################
-#
-##############################################################################
-sub parseOutputCLM
-{
-
- my $report;
- my $testId;
- my @testName={};
- my $testReason;
-
- my ($self, $statFoo) = @_ ;
-
- open(FOO, "< $statFoo"); # open for input
- open(FOO_OUT, "> $statFoo.xFail"); # open for input
-
- my(@reportLines);
-
- while () {
-
- my($line) = $_;
-
- my @outArr=split(/ /,$line);
- if (looks_like_number(@outArr[0])) {
-
- $self->{_numericalTestId}++;
-
- my $num=sprintf("%03d", $self->{_numericalTestId});
- my $totNum=sprintf("%03d", $self->{_totTests});
-
- #_# last element has the pass/fail info.
- chomp(@outArr[-1]);
- my $repPass=substr(@outArr[-1], -4, 4);
-
- if ($DEBUG) {
- print ("xFail::expectedFail::parseOutput @outArr[0] \n");
- print ("xFail::expectedFail::parseOutput @outArr[1] \n");
- print ("xFail::expectedFail::parseOutput @outArr[2] \n");
- print ("xFail::expectedFail::parseOutput @outArr[3] \n");
- print ("xFail::expectedFail::parseOutput @outArr[4] \n");
- print ("xFail::expectedFail::parseOutput @outArr[5] \n");
- print ("xFail::expectedFail::parseOutput @outArr[6] \n");
- print ("xFail::expectedFail::parseOutput @outArr[-1] \n");
- print ("xFail::expectedFail::parseOutput $repPass \n");
- }
-
- my $size = @outArr-1;
- if ($DEBUG) {
- print ("size of line $size \n");
- }
- my $endOfDesc=$size-1;
-
- if ($repPass eq "PASS") {
- $report=$pass;
- $testId=@outArr[1];
- @testName=@outArr[2..$endOfDesc];
-
- my ($retVal,$xFailText)=$self->_searchExpectedFail($testId);
-
- my $testReason=$self->_testNowPassing($testId,$retVal,$xFailText);
-
- #_# print out the test results
- print FOO_OUT ("$num/$totNum <$report> $testReason \n");
-
- } else {
- $testId=@outArr[1];
- my ($retVal,$xFailText)=$self->_searchExpectedFail($testId);
-
- if ($DEBUG) {
- print ("xFail::expectedFail::parseOutput Id $retVal,$xFailText \n");
- }
-
- @testName=@outArr[2..$endOfDesc];
-
- if ($retVal eq "TRUE"){
- #_# found an expected FAIL (xFAIL)
- $report=$xfail;
- $testReason= "";
- } else {
- #_# print a regular FAIL
- $report=$fail;
- $testReason="";
- }
-
- #_# print out the test results
- print FOO_OUT ("$num/$totNum <$report> $testReason \n");
-
- }
-
- } else {
- print FOO_OUT $line;
- }
- }
- close(FOO);
- close(FOO_OUT);
-}
-
-##############################################################################
-#
-##############################################################################
-
=head1 _searchExpectedFail
searches the list of expected fails for a match with testId.
@@ -608,8 +495,6 @@ sub _getTestType
my %testTypes = (
"build-namelist_test.pl" => "namelistTest",
- "test_driver.sh-i" => "clmInteractive",
- "test_driver.sh" => "clmBatch",
"clm-cesm.sh" => "cesm"
);
diff --git a/bld/unit_testers/xFail/wrapClmTests.pl b/bld/unit_testers/xFail/wrapClmTests.pl
deleted file mode 100755
index 28238b9d5d..0000000000
--- a/bld/unit_testers/xFail/wrapClmTests.pl
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env perl
-
-#-# =========================================================================================
-
-=head1 wrapClmTest.pl
-
-=head1 Overview
-
-This is a wrapper script that is called from test_driver.sh for either interactive or batch
-tests. It calls the CTOR for the xFail::expectedFail.pm module and also parses the td*.status
-file to create a new file with xFails listed.
-
-It takes the following arguments:
-
- numberOfTests -> number of tests from test_driver.sh
- statusFile -> name of the td..status file
- callingScript -> name of script calling this. For test_driver.sh it may be one of:
- 1) test_driver.sh-i for interactive tests
- 2) test_driver.sh for batch tests
-
-=head1 Notes
-
-This script may be run standalone which is useful for testing purposes.
-
-=cut
-
-#-# =========================================================================================
-
-use strict;
-use Getopt::Long;
-use English;
-use Cwd;
-use Scalar::Util qw(looks_like_number);
-
-my $DEBUG=0;
-
-sub usage {
- die <<EOF;
-EOF
-}
-
-my %opts = ( help => 0,
- numberOfTests => undef,
- statusFile => undef,
- callingScript => undef,
- );
-
-GetOptions(
- "h|help" => \$opts{'help'},
- "numberOfTests=s" => \$opts{'numberOfTests'},
- "statusFile=s" => \$opts{'statusFile'},
- "callingScript=s" => \$opts{'callingScript'},
-) or usage();
-
-# Give usage message.
-usage() if $opts{'help'};
-
-my $statFoo = undef;
-my $nTests = undef;
-my $script= undef;
-
-if (defined($opts{'statusFile'})) {
- $statFoo = $opts{'statusFile'};
-}
-if (defined($opts{'numberOfTests'})) {
- $nTests = $opts{'numberOfTests'};
-}
-if (defined($opts{'callingScript'})) {
- $script = $opts{'callingScript'};
-}
-
-my ( $self ) = @_;
-
-#Figure out where configure directory is and where can use the XML/Lite module from
-my $ProgName;
-($ProgName = $PROGRAM_NAME) =~ s!(.*)/!!; # name of program
-my $ProgDir = $1; # name of directory where program lives
-
-my $cwd = getcwd(); # current working directory
-my $cfgdir;
-
-if ($ProgDir) { $cfgdir = $ProgDir; }
-else { $cfgdir = $cwd; }
-
-#-----------------------------------------------------------------------------------------------
-# Add $cfgdir to the list of paths that Perl searches for modules
-#-----------------------------------------------------------------------------------------------
-my @dirs = ( $cfgdir,
- "$cfgdir/../",
- "$cfgdir/../../../../../cime/utils/perl5lib");
-unshift @INC, @dirs;
-my $result = eval "require expectedFail";
-if ( ! defined($result) ) {
- die <<"EOF";
-** Cannot find perl module \"xFail/expectedFail.pm\" from directories: @dirs **
-EOF
-}
-
-#_# ====================================
-#_# setup work complete. Now parse file
-#_# ====================================
-
-if ($DEBUG) {
- print (" wrapClmTests.pl:: calling script $script \n");
- print (" wrapClmTests.pl:: number of tests $nTests \n");
- print (" wrapClmTests.pl:: processing $statFoo \n");
-}
-
-#_# compGen not used for CLM batch or interactive tests, but we use "compare" as the default in this case
-my $compGen="compare";
-my $xFail = xFail::expectedFail->new($script,$compGen,$nTests);
-
-$xFail->parseOutputCLM($statFoo);
-
-exit(0);
diff --git a/ccs_config b/ccs_config
new file mode 160000
index 0000000000..69a958581e
--- /dev/null
+++ b/ccs_config
@@ -0,0 +1 @@
+Subproject commit 69a958581ecd2d32ee9cb1c38bcd3847b8b920bf
diff --git a/cime b/cime
new file mode 160000
index 0000000000..422ddaa770
--- /dev/null
+++ b/cime
@@ -0,0 +1 @@
+Subproject commit 422ddaa770a3cea6e83a60c9700ebce77acaceed
diff --git a/cime_config/SystemTests/lilacsmoke.py b/cime_config/SystemTests/lilacsmoke.py
index 66a94068da..1287301ba2 100644
--- a/cime_config/SystemTests/lilacsmoke.py
+++ b/cime_config/SystemTests/lilacsmoke.py
@@ -407,6 +407,11 @@ def run_phase(self):
# case.get_mpirun_cmd creates a command that runs the executable given by
# case.run_exe. So it's important that (elsewhere in this test script) we create a
# link pointing from that to the atm_driver.exe executable.
+ #
+ # 2024/5/28 slevis: We added the load_env here to replace the
+ # behavior of the PBS -V directive that was removed from
+ # /ccs_config/machines/config_batch.xml
+ self._case.load_env(reset=True)
lid = new_lid()
os.environ["OMP_NUM_THREADS"] = str(self._case.thread_count)
cmd = self._case.get_mpirun_cmd(allow_unresolved_envvars=False)
diff --git a/cime_config/SystemTests/pvt.py b/cime_config/SystemTests/pvt.py
new file mode 100644
index 0000000000..cf923dd334
--- /dev/null
+++ b/cime_config/SystemTests/pvt.py
@@ -0,0 +1,133 @@
+"""
+FATES land use potential vegetation spin up + transient test
+
+This is a FATES specific test:
+
+1) conduct a spinup with use_fates_potentialveg on
+ - write restart file
+ - use CLM_ACCELERATED_SPINUP?
+2) run a transient landuse case with use_fates_lupft
+ - start from the restart file generated in (1)
+"""
+from CIME.XML.standard_module_setup import *
+from CIME.SystemTests.system_tests_common import SystemTestsCommon
+from CIME.SystemTests.test_utils.user_nl_utils import append_to_user_nl_files
+import shutil, glob, os
+
+logger = logging.getLogger(__name__)
+
+
+class PVT(SystemTestsCommon):
+ def __init__(self, case):
+ SystemTestsCommon.__init__(self, case)
+
+ # Do not allow PVT to be run with certain testmods
+ # Should this be targeted to a specific testmod for simplicity for now?
+ # Technically this could be run with the luh fates_harvest_modes
+ error_message = None
+ casebaseid = self._case.get_value("CASEBASEID")
+ casebaseid = casebaseid.split("-")[-1]
+ if casebaseid[0:10] != "FatesLUPFT":
+ error_message = f"Only call PVT with testmod FatesLUPFT. {casebaseid} selected."
+
+ # Only allow to run if resolution is 4x5 for now
+ # Other grid resolutions will be pre-processed and included in the namelist defaults at a future date.
+ # Potentially we could generate these on the fly although doing so would result in increased build time
+ lnd_grid = self._case.get_value("LND_GRID")
+ if lnd_grid != "4x5":
+ error_message = (
+ f"PVT can currently only be run with 4x5 resolution. {lnd_grid} selected."
+ )
+
+ if error_message is not None:
+ logger.error(error_message)
+ raise RuntimeError(error_message)
+
+ def run_phase(self):
+ # -------------------------------------------------------------------
+ # (1) Run FATES spin-up case in potential vegetation mode
+ # -------------------------------------------------------------------
+ orig_case = self._case
+ orig_casevar = self._case.get_value("CASE")
+ caseroot = self._case.get_value("CASEROOT")
+
+ # Set the run start date based on the desired starting reference case year
+ refcase_year = 1700
+ stop_n_pveg = 5
+ startyear_pveg = refcase_year - stop_n_pveg
+
+ # clone the main case to create spinup case
+ logger.info("PVT log: cloning setup")
+ clone_path = "{}.potveg".format(caseroot)
+ if os.path.exists(clone_path):
+ shutil.rmtree(clone_path)
+ logger.info("PVT log: cloning")
+ clone = self._case.create_clone(clone_path, keepexe=True)
+ logger.info("PVT log: cloning complete")
+
+ # setup the clone case
+ os.chdir(clone_path)
+ self._set_active_case(clone)
+
+ # set the clone case values
+ with clone:
+ clone.set_value("CLM_ACCELERATED_SPINUP", "off")
+ clone.set_value("STOP_N", stop_n_pveg)
+ clone.set_value("STOP_OPTION", "nyears")
+ clone.set_value("RUN_STARTDATE", "{}-01-01".format(startyear_pveg))
+
+ # Modify the spin up case to use the potential vegetation mode.
+ # Checks for incompatible cases and necessary mapping files are
+ # handled in the build case.
+ # Turn off fates_harvest_mode for the spin up.
+
+ logger.info("PVT log: modify user_nl_clm file for spin up run")
+ added_content = ["use_fates_potentialveg = .true.", "fates_harvest_mode = 'no_harvest'"]
+ append_to_user_nl_files(clone_path, "clm", added_content)
+
+ # Run the spin up case
+ # As per SSP test:
+ # "No history files expected, set suffix=None to avoid compare error"
+ logger.info("PVT log: starting spin-up run")
+ dout_sr = clone.get_value("DOUT_S_ROOT")
+ self._skip_pnl = False
+ self.run_indv(suffix=None, st_archive=True)
+
+ # -------------------------------------------------------------------
+ # (2) Run FATES transient case using restart file from spin-up
+ # -------------------------------------------------------------------
+ os.chdir(caseroot)
+ self._set_active_case(orig_case)
+
+ # Copy restart files from spin up to the transient case run directory
+ # obtain rpointer files and necessary restart files from short term archiving directory
+ rundir = self._case.get_value("RUNDIR")
+
+ refdate = str(refcase_year) + "-01-01-00000"
+ rest_path = os.path.join(dout_sr, "rest", "{}".format(refdate))
+
+ for item in glob.glob("{}/*{}*".format(rest_path, refdate)):
+ link_name = os.path.join(rundir, os.path.basename(item))
+ if os.path.islink(link_name) and os.readlink(link_name) == item:
+ # Link is already set up correctly: do nothing
+ # (os.symlink raises an exception if you try to replace an
+ # existing file)
+ pass
+ else:
+ os.symlink(item, link_name)
+
+ for item in glob.glob("{}/*rpointer*".format(rest_path)):
+ shutil.copy(item, rundir)
+
+ # Update run case settings
+ self._case.set_value("CLM_ACCELERATED_SPINUP", "off")
+ self._case.set_value("RUN_TYPE", "hybrid")
+ self._case.set_value("GET_REFCASE", False)
+ self._case.set_value("RUN_REFCASE", "{}.potveg".format(orig_casevar))
+ self._case.set_value("RUN_REFDATE", "{}-01-01".format(refcase_year))
+ self._case.set_value("RUN_STARTDATE", "{}-01-01".format(refcase_year))
+ self._case.set_value("DOUT_S", False)
+ self._case.flush()
+
+ # do the restart run (short term archiving is off)
+ self.run_indv()
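The restart-staging idiom in PVT.run_phase above (symlink restart files unless an identical link already exists, copy rpointer files) is self-contained enough to sketch on its own; a hedged Python equivalent, with all paths assumed:

    import glob, os, shutil

    def stage_restarts(rest_path, rundir, refdate):
        # Symlink restart files into the run directory. os.symlink raises
        # if the name already exists, so skip links that are already correct.
        for item in glob.glob(os.path.join(rest_path, "*{}*".format(refdate))):
            link_name = os.path.join(rundir, os.path.basename(item))
            if not (os.path.islink(link_name) and os.readlink(link_name) == item):
                os.symlink(item, link_name)
        # rpointer files are small bookkeeping files; copy rather than link.
        for item in glob.glob(os.path.join(rest_path, "*rpointer*")):
            shutil.copy(item, rundir)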
diff --git a/cime_config/SystemTests/rxcropmaturity.py b/cime_config/SystemTests/rxcropmaturity.py
index acb63bb000..d25bd015ca 100644
--- a/cime_config/SystemTests/rxcropmaturity.py
+++ b/cime_config/SystemTests/rxcropmaturity.py
@@ -20,17 +20,25 @@
from CIME.SystemTests.system_tests_common import SystemTestsCommon
from CIME.XML.standard_module_setup import *
from CIME.SystemTests.test_utils.user_nl_utils import append_to_user_nl_files
+from CIME.case import Case
import shutil, glob
logger = logging.getLogger(__name__)
-class RXCROPMATURITY(SystemTestsCommon):
+class RXCROPMATURITYSHARED(SystemTestsCommon):
def __init__(self, case):
# initialize an object interface to the SMS system test
SystemTestsCommon.__init__(self, case)
- # Ensure run length is at least 5 years. Minimum to produce one complete growing season (i.e., two complete calendar years) actually 4 years, but that only gets you 1 season usable for GDD generation, so you can't check for season-to-season consistency.
+ # Is this a real RXCROPMATURITY test or not?
+ casebaseid = self._case.get_value("CASEBASEID")
+ full_test = "RXCROPMATURITY_" in casebaseid
+ skipgen_test = "RXCROPMATURITYSKIPGEN_" in casebaseid
+
+ # Ensure run length is at least 5 years. Minimum to produce one complete growing season
+ # (i.e., two complete calendar years) actually 4 years, but that only gets you 1 season
+ # usable for GDD generation, so you can't check for season-to-season consistency.
stop_n = self._case.get_value("STOP_N")
stop_option = self._case.get_value("STOP_OPTION")
stop_n_orig = stop_n
@@ -56,11 +64,20 @@ def __init__(self, case):
f"STOP_OPTION ({stop_option_orig}) must be nsecond(s), nminute(s), "
+ "nhour(s), nday(s), nmonth(s), or nyear(s)"
)
- elif stop_n < 5:
+ elif full_test and stop_n < 5:
error_message = (
"RXCROPMATURITY must be run for at least 5 years; you requested "
+ f"{stop_n_orig} {stop_option_orig[1:]}"
)
+ elif skipgen_test and stop_n < 3:
+ # First year is discarded because crops are already in the ground at restart, and those
+ # aren't affected by the new crop calendar inputs. The second year is useable, but we
+ # need a third year so that all crops planted in the second year have a chance to
+ # finish.
+ error_message = (
+ "RXCROPMATURITYSKIPGEN (both-forced part) must be run for at least 3 years; you requested "
+ + f"{stop_n_orig} {stop_option_orig[1:]}"
+ )
if error_message is not None:
logger.error(error_message)
raise RuntimeError(error_message)
@@ -69,7 +86,6 @@ def __init__(self, case):
self._run_Nyears = int(stop_n)
# Only allow RXCROPMATURITY to be called with test cropMonthOutput
- casebaseid = self._case.get_value("CASEBASEID")
if casebaseid.split("-")[-1] != "cropMonthOutput":
error_message = (
"Only call RXCROPMATURITY with test cropMonthOutput "
@@ -81,10 +97,16 @@ def __init__(self, case):
# Get files with prescribed sowing and harvest dates
self._get_rx_dates()
+ # Get cultivar maturity requirement file to fall back on if not generating it here
+ self._gdds_file = None
+ self._fallback_gdds_file = os.path.join(
+ os.path.dirname(self._sdatefile), "gdds_20230829_161011.nc"
+ )
+
# Which conda environment should we use?
self._get_conda_env()
- def run_phase(self):
+ def _run_phase(self, skip_gen=False):
# Modeling this after the SSP test, we create a clone to be the case whose outputs we don't
# want to be saved as baseline.
@@ -133,6 +155,12 @@ def run_phase(self):
# Download files from the server, if needed
case_gddgen.check_all_input_data()
+ # Copy needed file from original to gddgen directory
+ shutil.copyfile(
+ os.path.join(caseroot, ".env_mach_specific.sh"),
+ os.path.join(self._path_gddgen, ".env_mach_specific.sh"),
+ )
+
# Make custom version of surface file
logger.info("RXCROPMATURITY log: run fsurdat_modifier")
self._run_fsurdat_modifier()
@@ -146,9 +174,19 @@ def run_phase(self):
# "No history files expected, set suffix=None to avoid compare error"
# We *do* expect history files here, but anyway. This works.
self._skip_pnl = False
- self.run_indv(suffix=None, st_archive=True)
- self._run_generate_gdds(case_gddgen)
+ # If not generating GDDs, only run a few days of this.
+ if skip_gen:
+ with Case(self._path_gddgen, read_only=False) as case:
+ case.set_value("STOP_N", 5)
+ case.set_value("STOP_OPTION", "ndays")
+
+ self.run_indv(suffix=None, st_archive=True)
+ if skip_gen:
+ # Interpolate an existing GDD file. Needed to check obedience to GDD inputs.
+ self._run_interpolate_gdds()
+ else:
+ self._run_generate_gdds(case_gddgen)
# -------------------------------------------------------------------
# (3) Set up and perform Prescribed Calendars run
@@ -174,7 +212,7 @@ def run_phase(self):
# (4) Check Prescribed Calendars run
# -------------------------------------------------------------------
logger.info("RXCROPMATURITY log: output check: Prescribed Calendars")
- self._run_check_rxboth_run()
+ self._run_check_rxboth_run(skip_gen)
# Get sowing and harvest dates for this resolution.
def _get_rx_dates(self):
@@ -331,11 +369,16 @@ def _create_config_file_evenlysplitcrop(self):
cfg_out.write("PCT_OCEAN = 0.0\n")
cfg_out.write("PCT_URBAN = 0.0 0.0 0.0\n")
- def _run_check_rxboth_run(self):
+ def _run_check_rxboth_run(self, skip_gen):
output_dir = os.path.join(self._get_caseroot(), "run")
- first_usable_year = self._run_startyear + 2
- last_usable_year = self._run_startyear + self._run_Nyears - 2
+
+ if skip_gen:
+ first_usable_year = self._run_startyear + 1
+ last_usable_year = first_usable_year
+ else:
+ first_usable_year = self._run_startyear + 2
+ last_usable_year = self._run_startyear + self._run_Nyears - 2
tool_path = os.path.join(
self._ctsm_root, "python", "ctsm", "crop_calendars", "check_rxboth_run.py"
@@ -386,6 +429,7 @@ def _run_generate_gdds(self, case_gddgen):
hdates_file = self._hdatefile
# It'd be much nicer to call generate_gdds.main(), but I can't import generate_gdds.
+ # See https://github.com/ESCOMP/CTSM/issues/2603
tool_path = os.path.join(
self._ctsm_root, "python", "ctsm", "crop_calendars", "generate_gdds.py"
)
@@ -416,6 +460,31 @@ def _run_generate_gdds(self, case_gddgen):
raise RuntimeError(error_message)
self._gdds_file = generated_gdd_files[0]
+ def _run_interpolate_gdds(self):
+ # File where interpolated GDDs should be saved
+ self._gdds_file = os.path.join(self._get_caseroot(), "interpolated_gdds.nc")
+
+ # It'd be much nicer to call interpolate_gdds.main(), but I can't import interpolate_gdds.
+ # See https://github.com/ESCOMP/CTSM/issues/2603
+ tool_path = os.path.join(
+ self._ctsm_root, "python", "ctsm", "crop_calendars", "interpolate_gdds.py"
+ )
+ command = " ".join(
+ [
+ f"python3 {tool_path}",
+ f"--input-file {self._fallback_gdds_file}",
+ f"--target-file {self._sdatefile}",
+ f"--output-file {self._gdds_file}",
+ "--overwrite",
+ ]
+ )
+ stu.run_python_script(
+ self._get_caseroot(),
+ self._this_conda_env,
+ command,
+ tool_path,
+ )
+
def _get_conda_env(self):
conda_setup_commands = stu.cmds_to_setup_conda(self._get_caseroot())
@@ -442,3 +511,8 @@ def _get_flanduse_timeseries_in(self, case):
if flanduse_timeseries_in:
self._flanduse_timeseries_in = flanduse_timeseries_in.group(1)
break
+
+
+class RXCROPMATURITY(RXCROPMATURITYSHARED):
+ def run_phase(self):
+ self._run_phase()
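A worked example of the usable-year window introduced in _run_check_rxboth_run above: with run_startyear=2000, a 5-year full RXCROPMATURITY run checks years 2002 through 2003, while a 3-year skip-gen run checks only 2001. A sketch of the arithmetic (the function name is ours, not the test's):

    def usable_year_window(run_startyear, run_nyears, skip_gen):
        if skip_gen:
            # First year discarded: crops already in the ground at restart.
            first = run_startyear + 1
            return first, first
        first = run_startyear + 2
        last = run_startyear + run_nyears - 2
        return first, last

    assert usable_year_window(2000, 5, skip_gen=False) == (2002, 2003)
    assert usable_year_window(2000, 3, skip_gen=True) == (2001, 2001)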
diff --git a/cime_config/SystemTests/rxcropmaturityskipgen.py b/cime_config/SystemTests/rxcropmaturityskipgen.py
new file mode 100644
index 0000000000..409f2b9847
--- /dev/null
+++ b/cime_config/SystemTests/rxcropmaturityskipgen.py
@@ -0,0 +1,6 @@
+from rxcropmaturity import RXCROPMATURITYSHARED
+
+
+class RXCROPMATURITYSKIPGEN(RXCROPMATURITYSHARED):
+ def run_phase(self):
+ self._run_phase(skip_gen=True)
diff --git a/cime_config/SystemTests/sspmatrixcn.py b/cime_config/SystemTests/sspmatrixcn.py
new file mode 100644
index 0000000000..f4a09a277e
--- /dev/null
+++ b/cime_config/SystemTests/sspmatrixcn.py
@@ -0,0 +1,371 @@
+"""
+
+CTSM only test to do the CN-matrix spinup procedure
+
+This is a CLM specific test:
+Verifies that spinup works correctly
+this test is only valid for CLM compsets
+
+Step 0: Run an AD cold-start with matrix and matrix spinup off
+ Fast mode and fast-mode 2-loop spinup steps are now skipped
+ These were labeled as Step 1 and Step 2.
+Step 3: Run a slow-mode spinup
+Step 4: matrix Spinup off
+"""
+import shutil, glob, os, sys
+
+if __name__ == "__main__":
+ CIMEROOT = os.environ.get("CIMEROOT")
+ if CIMEROOT is None:
+ CIMEROOT = "../../cime"
+
+ sys.path.append(os.path.join(CIMEROOT, "scripts", "lib"))
+ sys.path.append(os.path.join(CIMEROOT, "scripts"))
+else:
+ from CIME.utils import append_testlog
+
+from CIME.XML.standard_module_setup import *
+from CIME.SystemTests.system_tests_common import SystemTestsCommon
+from CIME.SystemTests.test_utils import user_nl_utils
+
+
+logger = logging.getLogger(__name__)
+
+
+class SSPMATRIXCN(SystemTestsCommon):
+
+ # Class data
+ nyr_forcing = 2
+ # Get different integer multiples of the number of forcing years
+ full = nyr_forcing
+ twice = 2 * nyr_forcing
+ thrice = 3 * nyr_forcing
+ # Define the settings that will be used for each step
+ steps = ["0-AD", "1-SASU", "2-norm"]
+ desc = [
+ "Accell-Decomp(AD)-coldstart",
+ "slow-mode Semi-Analytic SpinUp(SASU)",
+ "normal",
+ ]
+ runtyp = ["startup", "hybrid", "branch"]
+ spin = ["on", "sasu", "off"]
+ stop_n = [5, thrice, thrice]
+ cold = [True, False, False]
+ iloop = [-999, -999, -999]
+ sasu = [-999, -999, -999]
+
+ def __init__(self, case=None):
+ """
+ initialize an object interface to the SSPMATRIXCN system test
+ """
+ expect(
+ len(self.steps) == len(self.sasu),
+ "length of steps must be the same as sasu",
+ )
+ expect(
+ len(self.steps) == len(self.spin),
+ "length of steps must be the same as spin",
+ )
+ expect(
+ len(self.steps) == len(self.desc),
+ "length of steps must be the same as desc",
+ )
+ expect(
+ len(self.steps) == len(self.cold),
+ "length of steps must be the same as cold",
+ )
+ expect(
+ len(self.steps) == len(self.runtyp),
+ "length of steps must be the same as runtyp",
+ )
+ expect(
+ len(self.steps) == len(self.iloop),
+ "length of steps must be the same as iloop",
+ )
+ expect(
+ len(self.steps) == len(self.stop_n),
+ "length of steps must be the same as stop_n",
+ )
+
+ if __name__ != "__main__":
+ SystemTestsCommon.__init__(self, case)
+ ystart = int(self._case.get_value("DATM_YR_START"))
+ yend = int(self._case.get_value("DATM_YR_END"))
+ self.comp = self._case.get_value("COMP_LND")
+ else:
+ self._case = None
+ self.comp = "clm"
+ ystart = 2000
+ yend = 2001
+
+ for n in range(len(self.steps)):
+ if n == 0:
+ expect(self.cold[n] == True, "First step MUST be a cold-start")
+ expect(self.runtyp[n] == "startup", "First step MUST be a startup")
+ else:
+ expect(self.cold[n] == False, "Other steps must NOT be a cold-start")
+ expect(self.runtyp[n] != "startup", "Other steps MUST NOT be a startup")
+
+ if self.spin[n] == "sasu":
+ expect(self.cold[n] == False, "SASU step should NOT be a cold-start")
+ if self.sasu[n] != -999:
+ expect(self.sasu[n] > 0, "SASU steps must set SASU cycle")
+ expect(
+ self.sasu[n] <= self.nyr_forcing,
+ "SASU cycles can't be greater than a full forcing cycle",
+ )
+
+ expect(
+ yend - ystart + 1 == self.nyr_forcing,
+ "Number of years run over MUST correspond to nyr_forcing",
+ )
+ self._testname = "SSPMATRIX"
+
+ def check_n(self, n):
+ "Check if n is within range"
+ expect(
+ ((n >= 0) and (n < self.n_steps())),
+ "Step number is out of range = " + str(n),
+ )
+
+ def __logger__(self, n=0):
+ "Log info on this step"
+
+ self.check_n(n)
+ msg = "Step {}: {}: doing a {} run for {} years".format(
+ self.steps[n], self.runtyp[n], self.desc[n], self.stop_n[n]
+ )
+ logger.info(msg)
+ logger.info(" spinup type: {}".format(self.spin[n]))
+ if __name__ != "__main__":
+ append_testlog(msg)
+ if n + 1 < self.n_steps():
+ logger.info(" writing restarts at end of run")
+ logger.info(" short term archiving is on ")
+
+ def n_steps(self):
+ "Total number of steps"
+
+ return len(self.steps)
+
+ def total_years(self):
+ "Total number of years needed to do the full spinup"
+
+ ysum = 0
+ for nyr in self.stop_n:
+ ysum = ysum + nyr
+
+ return ysum
+
+ def append_user_nl(self, caseroot, n=0):
+ "Append needed settings to the user_nl files"
+
+ self.check_n(n)
+ # For all set output to yearly
+ contents_to_append = "hist_nhtfrq = -8760"
+ contents_to_append = contents_to_append + ", hist_mfilt = " + str(self.nyr_forcing)
+ # For all but last step turn extra matrix output to off
+ b4last = self.n_steps() - 1
+ if n < b4last:
+ contents_to_append = contents_to_append + ", hist_wrt_matrixcn_diag = .False."
+ # For matrix spinup steps, set the matrix spinup and other variables associated with it
+ if self.spin[n] == "sasu":
+ contents_to_append = contents_to_append + ", nyr_forcing = " + str(self.nyr_forcing)
+ if self.sasu[n] != -999:
+ contents_to_append = contents_to_append + ", nyr_sasu = " + str(self.sasu[n])
+ if self.iloop[n] != -999:
+ contents_to_append = contents_to_append + ", iloop_avg = " + str(self.iloop[n])
+
+ # For cold start, run with matrix off
+ if self.cold[n]:
+ contents_to_append = contents_to_append + ", use_matrixcn = .False."
+ contents_to_append = contents_to_append + ", use_soil_matrixcn = .False."
+
+ # Always append to the end
+ user_nl_utils.append_to_user_nl_files(
+ caseroot=caseroot, component=self.comp, contents=contents_to_append
+ )
+
+ def run_phase(self):
+ "Run phase"
+
+ caseroot = self._case.get_value("CASEROOT")
+ orig_case = self._case
+ orig_casevar = self._case.get_value("CASE")
+
+ # Get a clone of each step except the last one
+ b4last = self.n_steps() - 1
+ for n in range(b4last):
+ #
+ # Clone the main case, and get it setup for the next step
+ #
+ clone_path = "{}.step{}".format(caseroot, self.steps[n])
+ if os.path.exists(clone_path):
+ shutil.rmtree(clone_path)
+ if n > 0:
+ del clone
+ self._set_active_case(orig_case)
+ clone = self._case.create_clone(clone_path, keepexe=True)
+ os.chdir(clone_path)
+ self._set_active_case(clone)
+
+ self.__logger__(n)
+
+ with clone:
+ clone.set_value("RUN_TYPE", self.runtyp[n])
+ clone.set_value("STOP_N", self.stop_n[n])
+
+ clone.set_value("CLM_ACCELERATED_SPINUP", self.spin[n])
+
+ if self.cold[n]:
+ clone.set_value("CLM_FORCE_COLDSTART", "on")
+ else:
+ clone.set_value("CLM_FORCE_COLDSTART", "off")
+
+ self.append_user_nl(clone_path, n)
+
+ dout_sr = clone.get_value("DOUT_S_ROOT")
+
+ self._skip_pnl = False
+ #
+ # Start up from the previous case
+ #
+ rundir = clone.get_value("RUNDIR")
+ with clone:
+ if n > 0:
+ clone.set_value("GET_REFCASE", False)
+ expect("refcase" in locals(), "refcase was NOT previously set")
+ clone.set_value("RUN_REFCASE", refcase)
+ expect("refdate" in locals(), "refdate was NOT previously set")
+ clone.set_value("RUN_STARTDATE", refdate)
+ clone.set_value("RUN_REFDATE", refdate)
+ for item in glob.glob("{}/*{}*".format(rest_path, refdate)):
+ linkfile = os.path.join(rundir, os.path.basename(item))
+ if os.path.exists(linkfile):
+ os.remove(linkfile)
+ if not os.path.isdir(rundir):
+ os.makedirs(rundir)
+ os.symlink(item, linkfile)
+
+ for item in glob.glob("{}/*rpointer*".format(rest_path)):
+ shutil.copy(item, rundir)
+
+ #
+ # Run the case (Archiving on)
+ #
+ self._case.flush()
+ self.run_indv(suffix="step{}".format(self.steps[n]), st_archive=True)
+
+ #
+ # Get the reference case from this step for the next step
+ #
+ refcase = clone.get_value("CASE")
+ refdate = run_cmd_no_fail(
+ r'ls -1dt {}/rest/*-00000* | head -1 | sed "s/-00000.*//" | sed "s/^.*rest\///"'.format(
+ dout_sr
+ )
+ )
+ refsec = "00000"
+ rest_path = os.path.join(dout_sr, "rest", "{}-{}".format(refdate, refsec))
+
+ #
+ # Last step in original case
+ #
+ n = self.n_steps() - 1
+ #
+ # Setup the case to run from the previous clone step
+ #
+ os.chdir(caseroot)
+ self._set_active_case(orig_case)
+ self.__logger__(n)
+ self._case.set_value("DOUT_S", False)
+ self._case.set_value("RUN_TYPE", self.runtyp[n])
+ self._case.set_value("STOP_N", self.stop_n[n])
+ rundir = self._case.get_value("RUNDIR")
+ self._case.set_value("GET_REFCASE", False)
+ expect("refcase" in locals(), "refcase was NOT previously set")
+ self._case.set_value("RUN_REFCASE", refcase)
+ expect("refdate" in locals(), "refdate was NOT previously set")
+ self._case.set_value("RUN_REFDATE", refdate)
+ self._case.set_value("RUN_STARTDATE", refdate)
+ for item in glob.glob("{}/*{}*".format(rest_path, refdate)):
+ linkfile = os.path.join(rundir, os.path.basename(item))
+ if os.path.exists(linkfile):
+ os.remove(linkfile)
+ os.symlink(item, linkfile)
+
+ for item in glob.glob("{}/*rpointer*".format(rest_path)):
+ shutil.copy(item, rundir)
+
+ self.append_user_nl(caseroot, n)
+ #
+ # Don't need to set COLDSTART or ACCEL_SPINUP
+ #
+
+ #
+ # Run the case (short term archiving is off)
+ #
+ self._case.flush()
+ self.run_indv(suffix="step{}".format(self.steps[n]), st_archive=False)
+
+
+#
+# Unit testing for above
+#
+import unittest
+from CIME.case import Case
+from CIME.utils import _LessThanFilter
+from argparse import RawTextHelpFormatter
+
+
+class test_ssp_matrixcn(unittest.TestCase):
+ def setUp(self):
+ self.ssp = SSPMATRIXCN()
+
+ def test_logger(self):
+ # Test the logger
+ stream_handler = logging.StreamHandler(sys.stdout)
+ logger.addHandler(stream_handler)
+ logger.level = logging.DEBUG
+ logger.info("nyr_forcing = {}".format(self.ssp.nyr_forcing))
+ for n in range(self.ssp.n_steps()):
+ self.ssp.__logger__(n)
+ if self.ssp.spin[n] == "sasu":
+ logger.info(" SASU spinup is .true.")
+ if self.ssp.sasu[n] != -999:
+ logger.info(" nyr_sasu = {}".format(self.ssp.sasu[n]))
+ if self.ssp.iloop[n] != -999:
+ logger.info(" iloop_avg = {}".format(self.ssp.iloop[n]))
+
+ logger.info("Total number of years {}".format(self.ssp.total_years()))
+ logger.removeHandler(stream_handler)
+
+ def test_n_steps(self):
+ self.assertTrue(self.ssp.n_steps() == 3)
+
+ def test_valid_n(self):
+ for n in range(self.ssp.n_steps()):
+ self.ssp.check_n(n)
+
+ def test_negative_n(self):
+ self.assertRaises(SystemExit, self.ssp.check_n, -1)
+
+ def test_n_too_big(self):
+ self.assertRaises(SystemExit, self.ssp.check_n, self.ssp.n_steps())
+
+ def test_append_user_nl_step2(self):
+ ufile = "user_nl_clm"
+ if not os.path.exists(ufile):
+ os.mknod(ufile)
+ else:
+ expect(0, ufile + " file already exists, not overwriting it")
+
+ self.ssp.append_user_nl(caseroot=".", n=2)
+ print(ufile + " for step 2")
+ log = open(ufile, "r").read()
+ print(log)
+ os.remove(ufile)
+
+
+if __name__ == "__main__":
+ unittest.main()
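The refdate lookup in SSPMATRIXCN.run_phase shells out to an ls/sed pipeline to find the newest rest/YYYY-MM-DD-00000 directory; a pure-Python sketch of the same lookup (the directory layout is assumed):

    import glob, os

    def latest_rest_date(dout_sr):
        # Newest */rest/*-00000* directory by modification time, returned
        # as the YYYY-MM-DD prefix that RUN_REFDATE expects.
        dirs = glob.glob(os.path.join(dout_sr, "rest", "*-00000*"))
        newest = max(dirs, key=os.path.getmtime)
        return os.path.basename(newest).split("-00000")[0]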
diff --git a/cime_config/SystemTests/systemtest_utils.py b/cime_config/SystemTests/systemtest_utils.py
index c5ac986abd..c252f73251 100644
--- a/cime_config/SystemTests/systemtest_utils.py
+++ b/cime_config/SystemTests/systemtest_utils.py
@@ -2,7 +2,8 @@
Reduce code duplication by putting reused functions here.
"""
-import os, subprocess
+import os, subprocess, re, glob
+from collections import OrderedDict
def cmds_to_setup_conda(caseroot):
@@ -84,3 +85,26 @@ def run_python_script(caseroot, this_conda_env, command_in, tool_path):
except:
print(f"ERROR trying to run {tool_name}.")
raise
+
+
+# Read a user_nl file and return the namelist option if found
+def find_user_nl_option(caseroot, component, namelist_option):
+
+ # This is a copy of the CIME _get_list_of_user_nl_files
+ # which could be used if this moved into the CIME project
+ file_pattern = "user_nl_" + component + "*"
+ file_list = glob.glob(os.path.join(caseroot, file_pattern))
+
+ # Check that there is at least one file
+ if len(file_list) == 0:
+ raise RuntimeError("No user_nl files found for component " + component)
+
+ # Read through the file list and look for a match and return the whole entry
+ output = OrderedDict()
+ for one_file in file_list:
+ with open(one_file, "r") as user_nl_file:
+ user_nl_text = user_nl_file.read()
+ reg = rf"{namelist_option}.*?(?=,|\n)"
+ find_out = re.findall(reg, user_nl_text)
+ output[one_file] = find_out
+ return output
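For example, given a user_nl_clm containing "hist_nhtfrq = -8760, hist_mfilt = 2", the helper above returns each match up to the next comma or newline. A usage sketch (assuming systemtest_utils is importable from the caller):

    from systemtest_utils import find_user_nl_option

    opts = find_user_nl_option(caseroot=".", component="clm",
                               namelist_option="hist_mfilt")
    # OrderedDict mapping each user_nl_clm* path to its matches, e.g.
    # {'./user_nl_clm': ['hist_mfilt = 2']}
    for path, matches in opts.items():
        print(path, matches)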
diff --git a/cime_config/buildnml b/cime_config/buildnml
index 8e93b77015..3eb45dff31 100755
--- a/cime_config/buildnml
+++ b/cime_config/buildnml
@@ -50,6 +50,27 @@ def buildnml(case, caseroot, compname):
clm_force_coldstart = case.get_value("CLM_FORCE_COLDSTART")
lnd_tuning_mode = case.get_value("LND_TUNING_MODE")
clm_accelerated_spinup = case.get_value("CLM_ACCELERATED_SPINUP")
+ comp_interface = case.get_value("COMP_INTERFACE")
+ lilac_mode = case.get_value("LILAC_MODE")
+ if comp_interface == "nuopc":
+ yr_start = case.get_value("DATM_YR_START")
+ yr_end = case.get_value("DATM_YR_END")
+ else:
+ yr_start = case.get_value("DATM_CLMNCEP_YR_START")
+ yr_end = case.get_value("DATM_CLMNCEP_YR_END")
+
+ if yr_start != None and yr_start < 0:
+ yr_start = case.get_value("DATM_CPLHIST_YR_START")
+ yr_end = case.get_value("DATM_CPLHIST_YR_END")
+
+ # For LILAC
+ if yr_start == None or lilac_mode == "on":
+ yr_start = "0"
+ yr_end = "0"
+
+ yr_start = int(yr_start)
+ yr_end = int(yr_end)
+
comp_atm = case.get_value("COMP_ATM")
lnd_grid = case.get_value("LND_GRID")
ninst_lnd = case.get_value("NINST_LND")
@@ -252,7 +273,21 @@ def buildnml(case, caseroot, compname):
tuning = "-lnd_tuning_mode %s " % lnd_tuning_mode
+ #
+ # Spinup settings and specifics for SASU spinup
+ #
spinup = "-clm_accelerated_spinup %s " % clm_accelerated_spinup
+ if clm_accelerated_spinup == "sasu":
+ if (yr_start != None) and (yr_end != None):
+ nyr = yr_end - yr_start + 1
+ if (yr_end <= 0) or (yr_start <= 0):
+ logger.error("ERROR: Year start and end are both negative and should not be")
+ clm_namelist_opts = "nyr_forcing={} {}".format(nyr, clm_namelist_opts)
+ else:
+ logger.warning(
+ "WARNING: It does not make sense to do a SASU spinup with a prognostic atmosphere model"
+ )
+ logger.warning(" as it expects regular atmosphere forcing that is cycled over")
infile = os.path.join(ctsmconf, "namelist")
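The SASU branch above derives the forcing-cycle length handed to nyr_forcing from the DATM year range; a worked example (the years are illustrative):

    def sasu_nyr_forcing(yr_start, yr_end):
        # Length of the cycled forcing period for a SASU spinup.
        if yr_start <= 0 or yr_end <= 0:
            raise ValueError("Year start and end must both be positive")
        return yr_end - yr_start + 1

    assert sasu_nyr_forcing(1901, 1920) == 20  # e.g. a 20-year GSWP3 cycle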
diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml
index f7adab268f..a7e2a898aa 100644
--- a/cime_config/config_component.xml
+++ b/cime_config/config_component.xml
@@ -78,7 +78,7 @@
UNSET
- clm5_0_cam6.0,clm5_0_cam7.0,clm5.0_cam5.0,clm5.0_cam4.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm5_0_QIAN,clm5_0_1PT,clm5_0_NLDAS2,clm5_0_ERA5,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_QIAN,clm4_5_cam6.0,clm4_5_cam7.0,clm4_5_cam5.0,clm4_5_cam4.0,clm4_5_1PT,clm4_5_NLDAS2,clm4_5_ERA5,clm5_1_CRUv7,clm5_1_GSWP3v1,clm5_1_cam6.0,clm5_1_QIAN,clm5_1_1PT,clm5_1_NLDAS2,clm5_1_ERA5,clm6_0_CRUv7,clm6_0_GSWP3v1,clm6_0_cam6.0,clm6_0_cam7.0,clm6_0_cam5.0,clm6_0_cam4.0,clm6_0_QIAN,clm6_0_1PT,clm6_0_NLDAS2,clm6_0_ERA5
+ clm5_0_cam6.0,clm5_0_cam7.0,clm5_0_cam5.0,clm5_0_cam4.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm5_0_QIAN,clm5_0_1PT,clm5_0_NLDAS2,clm5_0_ERA5,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_QIAN,clm4_5_cam6.0,clm4_5_cam7.0,clm4_5_cam5.0,clm4_5_cam4.0,clm4_5_1PT,clm4_5_NLDAS2,clm4_5_ERA5,clm5_1_CRUv7,clm5_1_GSWP3v1,clm5_1_cam6.0,clm5_1_QIAN,clm5_1_1PT,clm5_1_NLDAS2,clm5_1_ERA5,clm6_0_CRUv7,clm6_0_GSWP3v1,clm6_0_cam6.0,clm6_0_cam7.0,clm6_0_cam5.0,clm6_0_cam4.0,clm6_0_QIAN,clm6_0_1PT,clm6_0_NLDAS2,clm6_0_ERA5
@@ -87,10 +87,11 @@
clm4_5_CRUv7clm4_5_GSWP3v1clm4_5_cam6.0
- clm4_5_cam4.0
- clm4_5_cam5.0
- clm4_5_cam7.0
- clm4_5_cam6.0
+ clm4_5_cam4.0
+ clm4_5_cam5.0
+ clm4_5_cam6.0
+ clm4_5_cam7.0
+ clm4_5_cam5.0clm4_5_QIANclm4_5_QIANclm4_5_1PT
@@ -102,9 +103,10 @@
clm5_0_GSWP3v1clm5_0_GSWP3v1clm5_0_cam6.0
- clm5_0_cam4.0
- clm5_0_cam5.0
- clm5_0_cam7.0
+ clm5_0_cam4.0
+ clm5_0_cam5.0
+ clm5_0_cam6.0
+ clm5_0_cam7.0clm5_0_cam6.0clm5_0_QIANclm5_0_QIAN
@@ -112,22 +114,24 @@
clm5_0_NLDAS2clm5_0_ERA5
- clm5_1_GSWP3v1clm5_1_GSWP3v1
- clm5_1_cam6.0
- clm5_1_cam4.0
- clm5_1_cam5.0
- clm5_1_cam6.0
+ INVALID_USE_CLM60_NOT_CLM51
+ clm5_1_cam4.0
+ clm5_1_cam5.0
+ clm5_1_cam6.0
+ INVALID_USE_CLM60_NOT_CLM51_FOR_CAM70
+ INVALID_USE_CLM60_NOT_CLM51_FOR_CPLHISTclm6_0_CRUv7clm6_0_CRUv7clm6_0_GSWP3v1clm6_0_GSWP3v1clm6_0_cam6.0
- clm6_0_cam4.0
- clm6_0_cam5.0
- clm6_0_cam7.0
- clm6_0_cam6.0
+ clm6_0_cam4.0
+ clm6_0_cam5.0
+ clm6_0_cam6.0
+ clm6_0_cam7.0
+ clm6_0_cam7.0clm6_0_QIANclm6_0_QIANclm6_0_1PT
@@ -199,23 +203,24 @@
UNSET
- 2010_control
- 2000_control
- 1850_control
+ 2010_control
+ 2000_control
+ 1850_control1850_noanthro_control1850_noanthro_control
- 20thC_transient
- 1850-2100_SSP5-8.5_transient
- 1850-2100_SSP1-2.6_transient
- 1850-2100_SSP3-7.0_transient
- 1850-2100_SSP5-3.4_transient
- 1850-2100_SSP2-4.5_transient
- 1850-2100_SSP1-1.9_transient
- 1850-2100_SSP4-3.4_transient
- 1850-2100_SSP4-6.0_transient
- 1850-2100_SSP5-8.5_transient
- 20thC_transient
- 1850-2100_SSP5-8.5_transient
+ 20thC_transient
+ 1850-2100_SSP5-8.5_transient
+ 1850-2100_SSP1-2.6_transient
+ 1850-2100_SSP3-7.0_transient
+ 1850-2100_SSP5-3.4_transient
+ 1850-2100_SSP2-4.5_transient
+ 1850-2100_SSP2-4.5_transient
+ 1850-2100_SSP1-1.9_transient
+ 1850-2100_SSP4-3.4_transient
+ 1850-2100_SSP4-6.0_transient
+ 1850-2100_SSP5-8.5_transient
+ 20thC_transient
+ 1850-2100_SSP5-8.5_transientrun_component_ctsmenv_run.xml
@@ -292,11 +297,11 @@
char
- on,off
+ on,sasu,offoffrun_component_ctsmenv_run.xml
- Turn on any settings for accellerating the model spinup.
+ Turn on any settings for accelerating the model spinup. SASU runs the Semi-Analytic Spin-Up with the CN soil matrix method.
diff --git a/cime_config/config_compsets.xml b/cime_config/config_compsets.xml
index 2de911da86..538329e523 100644
--- a/cime_config/config_compsets.xml
+++ b/cime_config/config_compsets.xml
@@ -91,6 +91,8 @@
I2000Clm60Sp2000_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV
+
+
@@ -182,6 +184,8 @@
I1850Clm60Sp1850_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV
+
+
@@ -291,22 +295,24 @@
I1850Clm60BgcNoAnthro
- 1850_DATM%GSWP3v1_CLM60%BGC-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV
+ 1850_DATM%GSWP3v1_CLM60%BGC-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV
+ I1850Clm60SpNoAnthro
- 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV
+ 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV
+ I1850Clm50BgcNoAnthro
- 1850_DATM%GSWP3v1_CLM50%BGC-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV
+ 1850_DATM%GSWP3v1_CLM50%BGC-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAVI1850Clm50SpNoAnthro
- 1850_DATM%GSWP3v1_CLM50%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV
+ 1850_DATM%GSWP3v1_CLM50%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV
@@ -319,12 +325,15 @@
I1850Clm60SpNoAnthro
- 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV
+ 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV
+ IHistClm60SpHIST_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV
+
+
@@ -631,6 +640,17 @@
I1850Clm60BgcCropG1850_DATM%GSWP3v1_CLM60%BGC-CROP_SICE_SOCN_MOSART_CISM2%GRIS-EVOLVE_SWAV
+
+
+
+
+
+
+ IHistClm60BgcCropG
+ HIST_DATM%GSWP3v1_CLM60%BGC-CROP_SICE_SOCN_MOSART_CISM2%GRIS-EVOLVE_SWAV
+
+
+
diff --git a/cime_config/config_tests.xml b/cime_config/config_tests.xml
index c0b6afed9d..12859b9131 100644
--- a/cime_config/config_tests.xml
+++ b/cime_config/config_tests.xml
@@ -123,6 +123,18 @@ This defines various CTSM-specific system tests
$STOP_N
+
+ FATES potential vegetation spin-up + land use transient run test
+ 1
+ ndays
+ startup
+ 4
+ FALSE
+ FALSE
+ $STOP_OPTION
+ $STOP_N
+
+
Generate prescribed maturity requirements, then test with them1
@@ -133,6 +145,16 @@ This defines various CTSM-specific system tests
$STOP_N
+
+ As RXCROPMATURITY but don't actually generate GDDs. Allows short testing with existing GDD inputs.
+ 1
+ FALSE
+ FALSE
+ never
+ $STOP_OPTION
+ $STOP_N
+
+
-
-
- PEND
- #2460
-
-
-
FAIL
@@ -44,57 +37,50 @@
-
-
+
+ FAIL
- #2444
+ #2619
+ This failure relates to the following REP failure.
-
-
-
+
+ FAIL
- #1887
+ #2619
+ This failure relates to the preceding ERP failure.
-
+ FAIL
- #1733
+ #2444
-
-
+
+ FAIL
- #1733
+ #2542
-
+ FAIL
- #2310
-
-
- FAIL
- #2310
+ #1733
-
-
+
+ FAIL#2310
-
- FAIL
- #2310
-
-
+ FAIL#2310
@@ -179,39 +165,32 @@
-
+ FAIL
- #2321
+ #2653
-
-
+
+ FAIL
- #2373
-
-
- FAIL
- FATES#701
+ FATES#1216
-
-
+
+ FAIL
- FATES#701
+ #2321
-
-
+
+ FAIL#2373
-
-
- FAILFATES#701
@@ -233,7 +212,7 @@
-
+ FAILFATES#1089
@@ -247,31 +226,31 @@
-
-
+
+ FAIL
- #2325
+ #2423
-
+ FAIL#2325
-
+ FAIL#2325
-
+ FAIL
- #2478
+ #2325
@@ -280,13 +259,9 @@
FAIL#2310
-
- FAIL
- #2310
-
-
+
diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml
index 05526b3529..59044feb10 100644
--- a/cime_config/testdefs/testlist_clm.xml
+++ b/cime_config/testdefs/testlist_clm.xml
@@ -10,6 +10,7 @@
prebeta: Run before CESM beta tags (more extensive, but should have tests outside of prealpha) (subset of aux_clm tests)
aux_cime_baselines: CESM cime baselines (subset of aux_clm tests)
hillslope: Experimental test list used for the hillslope option of the model
+ rxcropmaturity: Short tests to be run during development related to prescribed crop calendars
-->
[Remainder of the testlist_clm.xml diff: the individual XML <test> entries
were lost in extraction, leaving only bare +/- markers and hunk headers.
The hunks add, remove, and reassign many test entries, but their contents
are not recoverable.]
diff --git a/cime_config/testdefs/testmods_dirs/clm/Fates/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/Fates/user_nl_clm
index 406fb598f6..91df3e2e61 100644
--- a/cime_config/testdefs/testmods_dirs/clm/Fates/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/Fates/user_nl_clm
@@ -16,7 +16,7 @@ hist_fincl1 = 'FATES_NCOHORTS', 'FATES_TRIMMING', 'FATES_AREA_PLANTS',
'FATES_SAPWOODC', 'FATES_LEAFC', 'FATES_FROOTC', 'FATES_REPROC',
'FATES_STRUCTC', 'FATES_NONSTRUCTC', 'FATES_VEGC_ABOVEGROUND',
'FATES_CANOPY_VEGC', 'FATES_USTORY_VEGC', 'FATES_PRIMARY_PATCHFUSION_ERR',
-'FATES_HARVEST_CARBON_FLUX', 'FATES_DISTURBANCE_RATE_FIRE',
+'FATES_HARVEST_WOODPROD_C_FLUX', 'FATES_DISTURBANCE_RATE_FIRE',
'FATES_DISTURBANCE_RATE_LOGGING', 'FATES_DISTURBANCE_RATE_TREEFALL',
'FATES_STOMATAL_COND', 'FATES_LBLAYER_COND', 'FATES_NPP', 'FATES_GPP',
'FATES_AUTORESP', 'FATES_GROWTH_RESP', 'FATES_MAINT_RESP', 'FATES_GPP_CANOPY',
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/README b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/README
new file mode 100644
index 0000000000..9b782cb2a7
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/README
@@ -0,0 +1,4 @@
+The FATES LUH2 category of test mods currently only supports the
+4x5 grid resolution. This is because we only have one LUH2 time series
+dataset, at the 4x5 resolution. In the future we will provide more
+resolutions, which will be added to the namelist defaults.
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/user_nl_clm
index 854c21407f..e25490ffbb 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2/user_nl_clm
@@ -1 +1,9 @@
+
+! Run a transient case, with vegetation starting from bare ground, but land use starting from LUH state vector on starting date, in a nocomp configuration.
+! From Charlie's list of valid FATES configurations:
+! https://docs.google.com/spreadsheets/d/1eE3sRMYxfocZKbT8uIQhXpjjtfM2feXPRSWXJNoo4jM/edit#gid=0
use_fates_luh = .true.
+use_fates_nocomp = .true.
+use_fates_fixed_biogeog = .true.
+use_fates_sp = .false.
+use_fates_potentialveg = .false.
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/include_user_mods
new file mode 100644
index 0000000000..7eb8bb1579
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/include_user_mods
@@ -0,0 +1 @@
+../FatesColdLUH2
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/user_nl_clm
new file mode 100644
index 0000000000..426b41b49e
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestArea/user_nl_clm
@@ -0,0 +1 @@
+fates_harvest_mode = 'luhdata_area'
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/include_user_mods
new file mode 100644
index 0000000000..7eb8bb1579
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/include_user_mods
@@ -0,0 +1 @@
+../FatesColdLUH2
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/user_nl_clm
new file mode 100644
index 0000000000..7b6bc24f5a
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLUH2HarvestMass/user_nl_clm
@@ -0,0 +1 @@
+fates_harvest_mode = 'luhdata_mass'
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
index 668f9c861d..b27a74031c 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLandUse/user_nl_clm
@@ -1,2 +1,2 @@
flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_4x5_hist_16_CMIP6_1850-2015_c230620.nc'
-do_harvest = .true.
+fates_harvest_mode = 'landuse_timeseries'
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdLogging/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdLogging/user_nl_clm
index 3b74a4fd37..d2079d9e43 100644
--- a/cime_config/testdefs/testmods_dirs/clm/FatesColdLogging/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdLogging/user_nl_clm
@@ -1 +1 @@
-use_fates_logging= .true.
+fates_harvest_mode = 'event_code'
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/README b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/README
new file mode 100644
index 0000000000..88f5c2c8fb
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/README
@@ -0,0 +1,9 @@
+This test mod does not use cold start and is intended to
+be used in conjunction with a test workflow that provides
+an initialization file. Currently this is accomplished
+by using the test mod in conjunction with the PVT
+system test. The PVT system test runs a FATES spin-up
+case using the use_fates_potentialveg mode and then
+references the restart output to run a use_fates_lupft
+transient mode case.
+
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/include_user_mods
new file mode 100644
index 0000000000..4c7aa0f2b4
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/include_user_mods
@@ -0,0 +1 @@
+../Fates
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/user_nl_clm
new file mode 100644
index 0000000000..10044848a0
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFT/user_nl_clm
@@ -0,0 +1 @@
+use_fates_lupft = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/include_user_mods
new file mode 100644
index 0000000000..1ceba4c200
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/include_user_mods
@@ -0,0 +1 @@
+../FatesLUPFT
diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/user_nl_clm
new file mode 100644
index 0000000000..426b41b49e
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/FatesLUPFTAreaHarvest/user_nl_clm
@@ -0,0 +1 @@
+fates_harvest_mode = 'luhdata_area'
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/include_user_mods
new file mode 100644
index 0000000000..2cc5720115
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/include_user_mods
@@ -0,0 +1 @@
+../ciso_monthly
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/user_nl_clm
new file mode 100644
index 0000000000..6b7eb4347d
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn/user_nl_clm
@@ -0,0 +1,3 @@
+ use_matrixcn = .true.
+ use_soil_matrixcn = .true.
+ hist_wrt_matrixcn_diag = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/include_user_mods
new file mode 100644
index 0000000000..0634bda41e
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/include_user_mods
@@ -0,0 +1 @@
+../ciso_monthly_matrixcn
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/shell_commands b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/shell_commands
new file mode 100755
index 0000000000..45fdc7e8fd
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/shell_commands
@@ -0,0 +1,2 @@
+./xmlchange CLM_ACCELERATED_SPINUP=sasu
+./xmlchange MOSART_MODE=NULL
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/user_nl_mosart b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/user_nl_mosart
new file mode 100644
index 0000000000..82243d7d3d
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_monthly_matrixcn_spinup/user_nl_mosart
@@ -0,0 +1 @@
+frivinp = '/dev/null'
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/include_user_mods
new file mode 100644
index 0000000000..ce640345c5
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/include_user_mods
@@ -0,0 +1 @@
+../ciso
diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/user_nl_clm
new file mode 100644
index 0000000000..91b62c0c05
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/ciso_soil_matrixcn_only/user_nl_clm
@@ -0,0 +1,2 @@
+ use_matrixcn = .false.
+ use_soil_matrixcn = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm50dynroots/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm50dynroots/user_nl_clm
deleted file mode 100644
index e493c3e8e7..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm50dynroots/user_nl_clm
+++ /dev/null
@@ -1,2 +0,0 @@
-use_dynroot = .true.
-use_hydrstress = .false.
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/include_user_mods
new file mode 100644
index 0000000000..399579f425
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/include_user_mods
@@ -0,0 +1 @@
+../monthly
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm
new file mode 100644
index 0000000000..df20ced9e8
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60_monthly_matrixcn_soilCN30/user_nl_clm
@@ -0,0 +1,2 @@
+use_soil_matrixcn = .true.
+paramfile = '$DIN_LOC_ROOT/lnd/clm2/paramdata/ctsm60_params_cn30.c240625.nc'
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/include_user_mods
deleted file mode 100644
index 3dabdc9aeb..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/include_user_mods
+++ /dev/null
@@ -1 +0,0 @@
-../clm60cam6LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/user_nl_clm
deleted file mode 100644
index e7627dea50..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeCiso/user_nl_clm
+++ /dev/null
@@ -1,5 +0,0 @@
-! Turn on Carbon isotopes
-use_c13 = .true.
-use_c14 = .true.
-use_c13_timeseries = .true.
-use_c14_bombspike = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/include_user_mods
deleted file mode 100644
index 3dabdc9aeb..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/include_user_mods
+++ /dev/null
@@ -1 +0,0 @@
-../clm60cam6LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/user_nl_clm
deleted file mode 100644
index 93b7ee2e48..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningModeZDustSoilErod/user_nl_clm
+++ /dev/null
@@ -1,3 +0,0 @@
-! Turn on using the soil eroditability file in CTSM
-dust_emis_method = 'Zender_2003'
-zender_soil_erod_source = 'lnd'
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_1979Start/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_1979Start/include_user_mods
deleted file mode 100644
index 3dabdc9aeb..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_1979Start/include_user_mods
+++ /dev/null
@@ -1 +0,0 @@
-../clm60cam6LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_2013Start/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_2013Start/include_user_mods
deleted file mode 100644
index 3dabdc9aeb..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_2013Start/include_user_mods
+++ /dev/null
@@ -1 +0,0 @@
-../clm60cam6LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm50dynroots/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode/include_user_mods
similarity index 100%
rename from cime_config/testdefs/testmods_dirs/clm/clm50dynroots/include_user_mods
rename to cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode/include_user_mods
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode/shell_commands b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode/shell_commands
new file mode 100644
index 0000000000..7dd25a08bf
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode/shell_commands
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+./xmlchange LND_TUNING_MODE="clm6_0_cam7.0"
+./xmlchange ROF_NCPL='$ATM_NCPL'
+
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_1979Start/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_1979Start/include_user_mods
new file mode 100644
index 0000000000..ef8619d930
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_1979Start/include_user_mods
@@ -0,0 +1 @@
+../clm60cam7LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_1979Start/shell_commands b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_1979Start/shell_commands
similarity index 100%
rename from cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_1979Start/shell_commands
rename to cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_1979Start/shell_commands
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_2013Start/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_2013Start/include_user_mods
new file mode 100644
index 0000000000..ef8619d930
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_2013Start/include_user_mods
@@ -0,0 +1 @@
+../clm60cam7LndTuningMode
diff --git a/cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_2013Start/shell_commands b/cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_2013Start/shell_commands
similarity index 100%
rename from cime_config/testdefs/testmods_dirs/clm/clm60cam6LndTuningMode_2013Start/shell_commands
rename to cime_config/testdefs/testmods_dirs/clm/clm60cam7LndTuningMode_2013Start/shell_commands
diff --git a/cime_config/testdefs/testmods_dirs/clm/default/shell_commands b/cime_config/testdefs/testmods_dirs/clm/default/shell_commands
index 45eb822729..f1f645afd8 100644
--- a/cime_config/testdefs/testmods_dirs/clm/default/shell_commands
+++ b/cime_config/testdefs/testmods_dirs/clm/default/shell_commands
@@ -1,4 +1,3 @@
#!/bin/bash
./xmlchange CLM_BLDNML_OPTS="-fire_emis" --append
-./xmlchange BFBFLAG="TRUE"
diff --git a/cime_config/testdefs/testmods_dirs/clm/luna/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/luna/user_nl_clm
index cbfbb9f525..fb796ebfaf 100644
--- a/cime_config/testdefs/testmods_dirs/clm/luna/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/luna/user_nl_clm
@@ -1,3 +1,3 @@
use_luna = .true.
use_flexibleCN = .false.
-
+ use_matrixcn = .false.
diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn/include_user_mods
similarity index 100%
rename from cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods
rename to cime_config/testdefs/testmods_dirs/clm/matrixcnOn/include_user_mods
diff --git a/cime_config/testdefs/testmods_dirs/clm/matrixcnOn/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn/user_nl_clm
new file mode 100644
index 0000000000..185d6a2410
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn/user_nl_clm
@@ -0,0 +1,3 @@
+hist_wrt_matrixcn_diag = .true.
+use_matrixcn = .true.
+use_soil_matrixcn = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/include_user_mods
new file mode 100644
index 0000000000..a3c70cba11
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/include_user_mods
@@ -0,0 +1 @@
+../matrixcnOn
diff --git a/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/shell_commands b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/shell_commands
new file mode 100644
index 0000000000..d94ef06a5c
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/matrixcnOn_ignore_warnings/shell_commands
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+./xmlchange CLM_BLDNML_OPTS="-ignore_warnings" --append
+
+# In this testmod directory we are ignoring warnings about running
+# matrixcn in transient simulations AND/OR
+# warnings about running matrixcn with threading (NTHRDS_LND > 1).
diff --git a/cime_config/testdefs/testmods_dirs/clm/mimics/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/mimics/user_nl_clm
index 152d91b21e..15b69007d8 100644
--- a/cime_config/testdefs/testmods_dirs/clm/mimics/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/clm/mimics/user_nl_clm
@@ -1 +1,3 @@
soil_decomp_method = 'MIMICSWieder2015'
+use_matrixcn = .false.
+use_soil_matrixcn = .false.
diff --git a/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/include_user_mods
new file mode 100644
index 0000000000..48284fffc4
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/include_user_mods
@@ -0,0 +1 @@
+../mimics
diff --git a/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/user_nl_clm
new file mode 100644
index 0000000000..083717eb95
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/mimics_matrixcn/user_nl_clm
@@ -0,0 +1 @@
+use_matrixcn = .true.
diff --git a/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/include_user_mods
new file mode 100644
index 0000000000..399579f425
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/include_user_mods
@@ -0,0 +1 @@
+../monthly
diff --git a/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/shell_commands b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/shell_commands
new file mode 100755
index 0000000000..3a435b6233
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/shell_commands
@@ -0,0 +1,4 @@
+./xmlchange CLM_ACCELERATED_SPINUP=sasu
+./xmlchange DATM_YR_START=1901,DATM_YR_END=1902
+./xmlchange MOSART_MODE=NULL
+
diff --git a/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/user_nl_clm
new file mode 100644
index 0000000000..2e232a3f40
--- /dev/null
+++ b/cime_config/testdefs/testmods_dirs/clm/monthly_matrixcn_fast_spinup/user_nl_clm
@@ -0,0 +1 @@
+nyr_sasu = 1
diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README b/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README
deleted file mode 100644
index aefd8adee7..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README
+++ /dev/null
@@ -1,10 +0,0 @@
-This testmod directory currently isn't used in any tests, but is useful
-for the following reason:
-
-According to Mariana Vertenstein: At least at one point, you could get
-bit-for-bit answers when comparing the mct and nuopc versions of CTSM in
-an I compset with SROF and SGLC, if using the changes in this testmod in
-both the mct and nuopc runs.
-
-So we are keeping this around in case someone wants to reproduce that
-comparison.
diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl b/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl
deleted file mode 100644
index 6bfefec90b..0000000000
--- a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl
+++ /dev/null
@@ -1,4 +0,0 @@
-orb_eccen = 0.
-orb_mvelp = 0.
-orb_obliq = 0.
-orb_mode = "fixed_parameters"
diff --git a/cime_config/testdefs/testmods_dirs/clm/pts/shell_commands b/cime_config/testdefs/testmods_dirs/clm/pts/shell_commands
index 1613d28b25..ad140e45e1 100644
--- a/cime_config/testdefs/testmods_dirs/clm/pts/shell_commands
+++ b/cime_config/testdefs/testmods_dirs/clm/pts/shell_commands
@@ -22,5 +22,3 @@
./xmlchange NTASKS_ROF=1
./xmlchange NTASKS_WAV=1
./xmlchange NTASKS_ESP=1
-./xmlchange MOSART_MODE=NULL
-./xmlchange RTM_MODE=NULL
diff --git a/cime_config/testdefs/testmods_dirs/clm/rtmColdSSP/user_nl_rtm b/cime_config/testdefs/testmods_dirs/clm/rtmColdSSP/user_nl_rtm
index d1a0254a0b..e78d13a51c 100644
--- a/cime_config/testdefs/testmods_dirs/clm/rtmColdSSP/user_nl_rtm
+++ b/cime_config/testdefs/testmods_dirs/clm/rtmColdSSP/user_nl_rtm
@@ -1,4 +1,4 @@
-finidat_rtm = " "
-rtmhist_mfilt = 1
-rtmhist_ndens = 2
-rtmhist_nhtfrq = 0
+finidat = " "
+mfilt = 1
+ndens = 2
+nhtfrq = 0
diff --git a/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_datm_streams b/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_datm_streams
index 35071ff415..29a8c675ac 100644
--- a/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_datm_streams
+++ b/cime_config/usermods_dirs/PLUMBER2/defaults/user_nl_datm_streams
@@ -21,7 +21,7 @@
! foo:year_first = 1950
! would change the stream year_first stream_entry to 1950 for the foo stream block
!------------------------------------------------------------------------
-! This will come out when cdeps externals are updated
+! This will come out when the cdeps submodule is updated
CLM_USRDAT.PLUMBER2:datavars = ZBOT Sa_z, \
TBOT Sa_tbot, \
QBOT Sa_shum, \
diff --git a/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/shell_commands b/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/shell_commands
new file mode 100644
index 0000000000..1176d22871
--- /dev/null
+++ b/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/shell_commands
@@ -0,0 +1,8 @@
+
+# Change below line if you move the subset data directory
+
+./xmlchange CLM_USRDAT_DIR='$DIN_LOC_ROOT/lnd/clm2/regional_datasets/f09_38x288pt_PanBoreal'
+
+./xmlchange ATM_DOMAIN_MESH='$CLM_USRDAT_DIR/domain.lnd.fv0.9x1.25_gx1v7_f09_38x288pt_PanBoreal_c230524_ESMF_UNSTRUCTURED_MESH.nc'
+./xmlchange LND_DOMAIN_MESH='$CLM_USRDAT_DIR/domain.lnd.fv0.9x1.25_gx1v7_f09_38x288pt_PanBoreal_c230524_ESMF_UNSTRUCTURED_MESH.nc'
+./xmlchange MASK_MESH='$CLM_USRDAT_DIR/domain.lnd.fv0.9x1.25_gx1v7_f09_38x288pt_PanBoreal_c230524_ESMF_UNSTRUCTURED_MESH.nc'
diff --git a/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/user_nl_clm b/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/user_nl_clm
new file mode 100644
index 0000000000..025aa390e7
--- /dev/null
+++ b/cime_config/usermods_dirs/f09_37x288pt_PanBoreal/user_nl_clm
@@ -0,0 +1,22 @@
+!----------------------------------------------------------------------------------
+! Users should add all user specific namelist changes below in the form of
+! namelist_var = new_namelist_value
+!
+! EXCEPTIONS:
+! Set use_cndv by the compset you use and the CLM_BLDNML_OPTS -dynamic_vegetation setting
+! Set use_vichydro by the compset you use and the CLM_BLDNML_OPTS -vichydro setting
+! Set use_cn by the compset you use and CLM_BLDNML_OPTS -bgc setting
+! Set use_crop by the compset you use and CLM_BLDNML_OPTS -crop setting
+! Set spinup_state by the CLM_BLDNML_OPTS -bgc_spinup setting
+! Set co2_ppmv with CCSM_CO2_PPMV option
+! Set fatmlndfrc with LND_DOMAIN_PATH/LND_DOMAIN_FILE options
+! Set finidat with RUN_REFCASE/RUN_REFDATE/RUN_REFTOD options for hybrid or branch cases
+! (includes $inst_string for multi-ensemble cases)
+! or with CLM_FORCE_COLDSTART to do a cold start
+! or set it with an explicit filename here.
+! Set maxpatch_glc with GLC_NEC option
+! Set glc_do_dynglacier with GLC_TWO_WAY_COUPLING env variable
+!----------------------------------------------------------------------------------
+
+
+fsurdat = '$CLM_USRDAT_DIR/surfdata_f09_38x288pt_PanBoreal_hist_16pfts_Irrig_CMIP6_simyr2000_c230523.nc'
diff --git a/cime_config/usermods_dirs/fates_sp/user_nl_clm b/cime_config/usermods_dirs/fates_sp/user_nl_clm
index 093ecd7eda..37da8d1c67 100644
--- a/cime_config/usermods_dirs/fates_sp/user_nl_clm
+++ b/cime_config/usermods_dirs/fates_sp/user_nl_clm
@@ -22,7 +22,7 @@ hist_fexcl1 = 'FATES_TRIMMING', 'FATES_COLD_STATUS', 'FATES_GDD', 'FATES_NCHILLD
'FATES_SAPWOODC', 'FATES_FROOTC', 'FATES_REPROC', 'FATES_STRUCTC', 'FATES_NONSTRUCTC',
'FATES_VEGC_ABOVEGROUND', 'FATES_CANOPY_VEGC', 'FATES_USTORY_VEGC', 'FATES_PRIMARY_PATCHFUSION_ERR',
'FATES_DISTURBANCE_RATE_FIRE', 'FATES_DISTURBANCE_RATE_LOGGING', 'FATES_DISTURBANCE_RATE_TREEFALL',
- 'FATES_HARVEST_CARBON_FLUX', 'FATES_GPP_CANOPY', 'FATES_AUTORESP_CANOPY',
+ 'FATES_HARVEST_WOODPROD_C_FLUX', 'FATES_GPP_CANOPY', 'FATES_AUTORESP_CANOPY',
'FATES_GPP_USTORY', 'FATES_AUTORESP_USTORY', 'FATES_CROWNAREA_CL', 'FATES_DEMOTION_CARBONFLUX',
'FATES_PROMOTION_CARBONFLUX', 'FATES_MORTALITY_CFLUX_CANOPY', 'FATES_MORTALITY_CFLUX_USTORY',
'FATES_DDBH_CANOPY_SZ', 'FATES_DDBH_USTORY_SZ', 'FATES_BASALAREA_SZ',
diff --git a/components/cdeps b/components/cdeps
new file mode 160000
index 0000000000..7476950699
--- /dev/null
+++ b/components/cdeps
@@ -0,0 +1 @@
+Subproject commit 7476950699909813d1938a34bd8d71bf5bfbf1e9
diff --git a/components/cism b/components/cism
new file mode 160000
index 0000000000..c84cc9f5b3
--- /dev/null
+++ b/components/cism
@@ -0,0 +1 @@
+Subproject commit c84cc9f5b3103766a35d0a7ddd5e9dbd7deae762
diff --git a/components/cmeps b/components/cmeps
new file mode 160000
index 0000000000..47fb4e633a
--- /dev/null
+++ b/components/cmeps
@@ -0,0 +1 @@
+Subproject commit 47fb4e633a76ec6d60969b1af751f90790387246
diff --git a/components/mizuRoute b/components/mizuRoute
new file mode 160000
index 0000000000..81c720c7ee
--- /dev/null
+++ b/components/mizuRoute
@@ -0,0 +1 @@
+Subproject commit 81c720c7ee51f9c69f2934f696078c42f4493565
diff --git a/components/mosart b/components/mosart
new file mode 160000
index 0000000000..e2ffe00004
--- /dev/null
+++ b/components/mosart
@@ -0,0 +1 @@
+Subproject commit e2ffe00004cc416cfc8bcfae2a949474075c1d1f
diff --git a/components/rtm b/components/rtm
new file mode 160000
index 0000000000..b3dfcfbba5
--- /dev/null
+++ b/components/rtm
@@ -0,0 +1 @@
+Subproject commit b3dfcfbba58c151ac5a6ab513b3515ef3deff798
diff --git a/doc/.ChangeLog_template b/doc/.ChangeLog_template
index d7ba696835..60a7f49288 100644
--- a/doc/.ChangeLog_template
+++ b/doc/.ChangeLog_template
@@ -47,6 +47,8 @@ Changes made to namelist defaults (e.g., changed parameter values):
Changes to the datasets (e.g., parameter, surface or initial files):
+Changes to documentation:
+
Substantial timing or memory changes:
[e.g., check PFS test in the test suite and look at timings, if you
expect possible significant timing changes]
@@ -155,7 +157,7 @@ Other details
-------------
[Remove any lines that don't apply. Remove entire section if nothing applies.]
-List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
Pull Requests that document the changes (include PR ids):
(https://github.com/ESCOMP/ctsm/pull)
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 671f5020de..506b3d5ad0 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,1615 @@
===============================================================
+Tag name: ctsm5.2.015
+Originator(s): multiple (Samuel Levis,UCAR/TSS,303-665-1310, @mvertens, @jedwards4b, @billsacks, @Katetc)
+Date: Mon 22 Jul 2024 12:46:17 PM MDT
+One-line Summary: Update submodule tags to pass runoff from cism to rof
+
+Purpose and description of changes
+----------------------------------
+
+ - Update MOSART, CMEPS, and CISM so CISM runoff goes to ROF rather than CTSM
+ - Update RTM with fix needed for Paleo LGM work
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2590 Update CMEPS/MOSART/CISM/RTM tags
+ Fixes https://github.com/ESCOMP/RTM/issues/50 Likely wrong RTM river flux to MOM6 within cesm2_3_beta17
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ Issue https://github.com/ESCOMP/RTM/issues/50
+ Likely wrong RTM river flux to MOM6 within cesm2_3_beta17 is now fixed with
+ https://github.com/ESCOMP/RTM/pull/51
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ Differences in namelist 'mosart_inparm':
+ missing variable: 'do_rtmflood'
+ missing variable: 'finidat_rtm'
+ missing variable: 'frivinp_rtm'
+ missing variable: 'rtmhist_fexcl1'
+ missing variable: 'rtmhist_fexcl2'
+ missing variable: 'rtmhist_fexcl3'
+ missing variable: 'rtmhist_fincl1'
+ missing variable: 'rtmhist_fincl2'
+ missing variable: 'rtmhist_fincl3'
+ missing variable: 'rtmhist_mfilt'
+ missing variable: 'rtmhist_ndens'
+ missing variable: 'rtmhist_nhtfrq'
+ found extra variable: 'budget_frq'
+ found extra variable: 'fexcl1'
+ found extra variable: 'fexcl2'
+ found extra variable: 'fexcl3'
+ found extra variable: 'fincl1'
+ found extra variable: 'fincl2'
+ found extra variable: 'fincl3'
+ found extra variable: 'finidat'
+ found extra variable: 'frivinp'
+ found extra variable: 'mfilt'
+ found extra variable: 'mosart_euler_calc'
+ found extra variable: 'mosart_tracers'
+ found extra variable: 'ndens'
+ found extra variable: 'nhtfrq'
+ found extra variable: 'use_halo_option'
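+
+  For example (mirroring the rtmColdSSP testmod update in this diff), where
+  user_nl_rtm previously contained
+
+    finidat_rtm = " "
+    rtmhist_mfilt = 1
+    rtmhist_ndens = 2
+    rtmhist_nhtfrq = 0
+
+  it now contains
+
+    finidat = " "
+    mfilt = 1
+    ndens = 2
+    nhtfrq = 0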
+
+Changes to documentation:
+ Not that I am aware of
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ ctsm_sci
+ derecho ---- OK (while in tag ctsm5.2.007; I did not repeat with the latest; see checklist in #2590)
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: mosart and rtm
+ - what platforms/compilers: all
+ - nature of change: mosart roundoff; rtm larger than roundoff due to bug fix
+
+ We are ignoring strange diffs from baseline in two tests in variable
+ FATES_TRANSITION_MATRIX_LULU as explained in issue #2656.
+
+Other details
+-------------
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+ See .gitmodules: cism, rtm, mosart, ccs_config, cmeps
+
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2605
+ https://github.com/ESCOMP/RTM/pull/51
+ https://github.com/ESCOMP/MOSART/pull/94
+ https://github.com/ESCOMP/CISM-wrapper/pull/100
+ https://github.com/ESCOMP/CMEPS/pull/463
+ https://github.com/ESMCI/ccs_config_cesm/pull/174
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.014
+Originator(s): chrislxj (Xingjie Lu, Northern Arizona U., xingjie.lu@nau.edu)
+Date: Fri 19 Jul 2024 11:04:17 AM MDT
+One-line Summary: use_matrixcn, use_soil_matrixcn come in as default .false.
+
+Purpose and description of changes
+----------------------------------
+
+ Currently set as default .false.:
+ - Add matrix module for vegetation and soil C and N cycle
+ - Add diagnostic variables C and N storage capacity in history files
+ - Add Sparse matrix module to increase the code efficiency
+ - Create spin-up switch, and be ready for matrix spin up development
+
+ Additional contributors: Yuanyuan Huang, Zhenggang Du, and Yiqi Luo from
+ Professor Yiqi Luo's EcoLab at Northern Arizona University, now at Cornell U.
+
+ In TSS/CGD/NCAR/UCAR, contributors in the last year include slevis, ekluzek, wwieder.
+ I apologize if I have omitted others who may have been involved prior.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #903 Bring CN-Matrix solution into CTSM
+ Fixes #2450 Cn-matrix testing in the aux_clm test-suites
+ Fixes #2621 matrixcn does not work with nrepr != 1 (number of crop reproductive pools)
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ use_matrixcn and use_soil_matrixcn are NOT available for clm4_5.
+ They ARE recommended for accelerating bgc spin-ups (keep reading
+ for more info) and are NOT recommended for:
+ - transient simulations (e.g. IHist)
+ - simulations that use threading (e.g. see tests with P64x2)
+ - NWP compsets
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ New: use_matrixcn, use_soil_matrixcn, hist_wrt_matrixcn_diag.
+ All three are default .false. at this time.
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ - hillslope_fsat_equals_zero is now default .false. when
+ use_hillslope = .false. as per issue #2652
+ - clm_accelerated_spinup can be set to on, sasu, off; matrixcn spin-up
+ is performed in that order (on, sasu, off); in the "on" phase, matrixcn
+ is not active, in the "sasu" phase matrixcn is active, and in the "off"
+ phase, matrixcn may be active
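+
+  As a concrete sketch of the "sasu" phase (mirroring the
+  monthly_matrixcn_fast_spinup and matrixcnOn testmods in this diff), a
+  spin-up case might use:
+
+    ./xmlchange CLM_ACCELERATED_SPINUP=sasu
+
+  together with the following in user_nl_clm:
+
+    use_matrixcn = .true.
+    use_soil_matrixcn = .true.
+    hist_wrt_matrixcn_diag = .true.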
+
+Changes to documentation:
+ None at this time. A description of the spinup procedure appears in
+ https://github.com/NCAR/LMWG_dev/issues/58
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+ - Future changes to the bgc will likely require changes in both
+ matrix and non-matrix parts of the code.
+ - matrixcn seems sensitive to changes in subgrid heterogeneity and
+ other subtleties. For example, matrixcn simulations have failed from
+ using a finidat file from a different simulation (#2592),
+ from running in transient mode (#2592), from running with threading (#2619),
+ and from setting hillslope_fsat_equals_zero = .true. (issue #2652).
+ - We recommend using matrixcn to accelerate bgc spin-ups and not for
+ most (any?) other purposes.
+
+Changes to tests or testing:
+ We introduced numerous new tests to the aux_clm and build-namelist_test.pl
+ test-suites that can be identified by searching "matrix"
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ ctsm_sci
+ derecho ---- OK
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes, roundoff
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: Non-matrix
+ - what platforms/compilers: All
+ - nature of change: roundoff
+ use_matrixcn and use_soil_matrixcn are default .false. We get
+ roundoff diffs due to order-of-operation changes in a few sections
+ of code, which improve the readability and streamlining of the code
+ in the presence of the matrix modules.
+
+ Testing with the non-default use_matrixcn and use_soil_matrixcn = .true.
+ appears here: https://github.com/NCAR/LMWG_dev/issues/58
+
+ PR #640 explains how answers are expected to
+ change from non-matrix to matrix simulations.
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/640
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.013
+Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu)
+ glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov)
+Date: Thu Jul 18 10:51:11 MDT 2024
+One-line Summary: FATES Land Use V2
+
+Purpose and description of changes
+----------------------------------
+This tag enables FATES to utilize its land use mode with fixed
+biogeography and no competition mode engaged. To facilitate
+this update, the host land model reads in a new static map data
+set that associates land use with FATES plant functional types.
+This tag also updates the pre-existing FATES dynamic land use
+module to provide access to the raw LUH2 harvest data from the
+FATES LUH2 timeseries data set (added in ctsm5.1.dev160).
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ FATES satellite phenology mode and land use mode are currently incompatible
+ and trying to engage both will result in a graceful build failure message
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ New FATES namelist option: fates_harvest_mode
+ - This new option provides five harvest modes
+ - The 'event_code' mode takes over the now defunct use_fates_logging option
+ - The 'landuse_timeseries' option supersedes the use of the do_harvest
+ option with FATES. Using do_harvest is no longer compatible with FATES.
+
+ New FATES namelist option: use_fates_lupft
+ - This option enables the necessary namelist options and data sets to run
+ FATES with land use, no competition, and fixed biogeography modes.
+
+ New FATES namelist option: flandusepftdat
+ - This data set is necessary for running with use_fates_lupft.
+
+ New FATES namelist option: use_fates_potentialveg
+ - This option is only necessary for use with FATES spin-up to transient
+ workflows that will engage the use_fates_lupft mode for the transient case
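+
+ As an illustrative example (mirroring the FatesLUPFTAreaHarvest testmod in
+ this diff), a land-use-with-harvest case might set the following in
+ user_nl_clm:
+
+   use_fates_lupft = .true.
+   fates_harvest_mode = 'luhdata_area'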
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ The default FATES parameter file has been updated to account for the new
+ parameters necessary for land use v2.
+
+ A new default data set has been provided for flandusepftdat. Only a 4x5 grid
+ resolution is currently provided.
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ The FATES land use timeseries data set, fluh_timeseries, has been updated to
+ provide a wider timeseries range, from 0850-2015.
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+
+ New FATES testmods have been added to account for the additional harvest mode
+ and use_fates_lupft namelist options. Additionally, a new system test prefix,
+ PVT, has been added to test the use_fates_potentialveg spin-up to use_fates_lupft
+ transient workflow. These have been added to the fates test suite.
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2444 Failing water isotope test on the ctsm5.2 branch
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to documentation: None
+
+Testing summary:
+----------------
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- TBD
+ izumi ------- TBD
+
+ fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates-<FATES tag>-<CTSM tag>)
+ derecho ----- OK
+
+If the tag used for baseline comparisons was NOT the previous tag, note that here:
+ fates tested against fates-sci.1.76.4_api.35.1.0-ctsm5.2.008
+
+
+Answer changes
+--------------
+Changes answers relative to baseline: Only for FATES test mods, otherwise B4B
+
+Other details
+-------------
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+ FATES: sci.1.73.0_api.35.0.0 -> sci.1.77.0_api.36.0.0
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+ #2507 -- FATES land use v2 API update (CTSM-side)
+ NGEET#1116 -- V2 Land Use Change
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.012
+Originator(s): sacks (Bill Sacks, UCAR/NCAR/CGD)
+Date: Tue 16 Jul 2024 08:57:42 AM MDT
+One-line Summary: Relax tolerance for truncating small snocan values in CanopyFluxes
+
+Purpose and description of changes
+----------------------------------
+
+Details are in PR #2457.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2444 Failing water isotope test on the ctsm5.2 branch
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to documentation: None
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline:
+ Yes, roundoff.
+
+ - what code configurations: All
+ - what platforms/compilers: All
+ - nature of change: Changes start roundoff level and grow over time.
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2457
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.011
+Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310)
+Date: Fri 12 Jul 2024 09:45:59 AM MDT
+One-line Summary: Merge b4b-dev
+
+Purpose and description of changes
+----------------------------------
+
+#2638 Remove use_dynroot and corresponding code and testing (slevis)
+#2623 Change a NIWO test to HARV and update expected failures (slevis)
+#2607 Fix py env create (wwieder)
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2451 Remove use_dynroot from clm codes and testing
+Fixes #2563 Unable to install ctsm_pylib environment for mksurfdata_esmf in CTSM5.2.005
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+Change a NIWO test to HARV and update expected failures
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - OK
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - OK
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Other details
+-------------
+[Remove any lines that don't apply. Remove entire section if nothing applies.]
+
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2638
+ https://github.com/ESCOMP/ctsm/pull/2623
+ https://github.com/ESCOMP/ctsm/pull/2607
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.010
+Originator(s): @cathyxinchangli (Cathy Xinchang Li, U of Illinois - Urbana-Champaign), @Face2sea, @olyson, @fang-bowen, @keerzhang1
+Date: Thu 11 Jul 2024 11:57:15 AM MDT
+One-line Summary: Explicit A/C adoption
+
+Purpose and description of changes
+----------------------------------
+
+ Code changes for adding an explicit air-conditioning (AC) adoption parameterization scheme in CLMU. This includes adding a new time-varying input variable (AC adoption rate, p_ac), changes to building energy calculations, and a toggle (new namelist variable urban_explicit_ac).
+
+ In this tag we keep the change off by default in order to show that answers do not change:
+ - If explicit AC adoption is off, the p_ac_[TBD/HD/MD] variables are set to 1 universally, and the building interior setpoints (tbuildmax_[TBD/HD/MD]) remain unchanged, to ensure the model reproduces previous results.
+ - If explicit AC adoption is on, the p_ac_[TBD/HD/MD] variables take on actual AC adoption rate values (present-day global data were developed together with the code change), and tbuildmax_[TBD/HD/MD] are set to 300K/27°C universally.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2254 Explicitly representing air-conditioning adoption in CESM
+
+Notes of particular relevance for users
+---------------------------------------
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ New namelist variable urban_explicit_ac = .false. by default
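+
+ For example, a user could turn the new scheme on by adding the following
+ line to user_nl_clm:
+
+   urban_explicit_ac = .true.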
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ The default urban time-varying input data file (namelist variable stream_fldfilename_urbantv) now includes p_ac_[TBD/HD/MD] variables, whether or not the explicit-AC-adoption scheme is on. The new file has been imported to svn: CTSM52_urbantv_Li_2024_0.9x1.25_simyr1849-2106_c20230621.nc
+
+Changes to documentation:
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline:
+ No, because we have introduced the changes as default .false. at this time
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2275
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.009
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Wed 10 Jul 2024 12:21:23 PM MDT
+One-line Summary: Allow for CAM7 in lnd_tuning_mode and handle C or E in long compset names
+
+Purpose and description of changes
+----------------------------------
+
+Changes so that CAM7 is allowed for the land-tuning mode. Allow "C" and "E" after the period designation in long compset names. Fix an
+issue with Zender dust emission soil erodibility files.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2480 -- cam7 lnd_tuning_mode
+ Fixes #2634 -- allow period part of long-compset name to have a "C" or "E"
+ Fixes #2637 -- bad lnd_tuning_mode for Zender soil erodibility file settings
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ LND_TUNING_MODE will fail with an error for CAM versions other than
+ cam4/cam5/cam6, or for CPLHIST, when used with CLM51; use CLM60 instead
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ Add lnd_tuning_mode options for cam7.0 to namelist defaults
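+
+ For example, selecting the new cam7.0 tuning in a case could look like this (a minimal
+ sketch using CIME's xmlchange; see the test mod changes below for the clm6_0_cam7.0 value):
+
+   ./xmlchange LND_TUNING_MODE=clm6_0_cam7.0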
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+ lnd_tuning_mode options duplicate a bunch of XML code lines and should be refactored
+
+Changes to tests or testing:
+ Add cam7.0 test mod directories
+ Change tests for clm6_0_cam6.0 lnd_tuning_mode to clm6_0_cam7.0
+
+Testing summary: regular
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Only the cam4/cam5 lnd_tuning_mode tests
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: lnd_tuning_mode ending with _cam4.0 or _cam5.0 suffix with zender_soil_erod_source='lnd'
+ - what platforms/compilers: All
+ - nature of change: new dust emissions
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+ #2632 -- Handle CAM7
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.008
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Fri 28 Jun 2024 12:22:46 PM MDT
+One-line Summary: Bring changes on temp-branch to master: b4b-dev, git-fleximod, hillslope fsat
+
+Purpose and description of changes
+----------------------------------
+
+Bring in changes that came in on the tmp-240620 branch to master now that cesm3_0_beta01 has
+been made. The changes are documented below and include the following tags:
+
+ tmp-240620.n03.ctsm5.2.007 samrabin -- upland hillslope column fsat values to zero
+ tmp-240620.n02.ctsm5.2.007 erik ------ Another update of git-fleximod
+ tmp-240620.n01.ctsm5.2.007 slevis ---- Merge b4b-dev
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+
+Notes of particular relevance for users
+---------------------------------------
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Testing summary:
+----------------
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: No; bit-for-bit with branch_tags/tmp-240620.n03.ctsm5.2.007
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+
+===============================================================
+===============================================================
+Tag name: tmp-240620.n03.ctsm5.2.007
+Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu)
+Date: Thu Jun 27 14:17:02 MDT 2024
+One-line Summary: Set upland hillslope column fsat values to zero
+
+Purpose and description of changes
+----------------------------------
+
+The topmodel-based fsat surface runoff scheme is not appropriate for upland hillslope columns, so set upland hillslope column fsat values to zero. Users can revert to the previous behavior by setting hillslope_fsat_equals_zero to false.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+- Added parameter hillslope_fsat_equals_zero (default true). Set to false for previous behavior.
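+
+For example, to restore the previous behavior (a minimal sketch of a user_nl_clm entry):
+
+  hillslope_fsat_equals_zero = .false.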
+
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- DIFF
+ izumi ------- DIFF
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes, but only for hillslope tests.
+
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+- ESCOMP/CTSM#2600 (https://github.com/ESCOMP/CTSM/pull/2600)
+
+===============================================================
+===============================================================
+Tag name: tmp-240620.n02.ctsm5.2.007
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Fri 21 Jun 2024 10:21:01 PM MDT
+One-line Summary: Another update of git-fleximod
+
+Purpose and description of changes
+----------------------------------
+
+Update the git-fleximod subtree to v0.7.8. This fixes an issue in identifying that a tag is out of sync in a
+submodule, an issue with recursion on a submodule, and an MS Windows memory error, and it now allows untracked
+files. In addition, git-fleximod status now marks optional submodules, which helps explain why they are marked
+as not checked out.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+
+List of CTSM issues fixed:
+ Fixes #2591 -- Start using submodule rather than external
+
+Notes of particular relevance for users
+---------------------------------------
+Caveats for users (e.g., need to interpolate initial conditions):
+ doc-builder checkout requires use of "git commit ." at the top level
+
+Changes to documentation:
+ Tweak some of the documentation around git-fleximod and use submodule rather than external.
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+
+Testing summary: regular
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+If the tag used for baseline comparisons was NOT the previous tag, note that here:
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: No; bit-for-bit
+
+Other details
+-------------
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+ git-fleximod subtree updated
+
+Pull Requests that document the changes (include PR ids):
+ #2577 -- Update git-fleximod
+(https://github.com/ESCOMP/ctsm/pull)
+
+===============================================================
+===============================================================
+Tag name: tmp-240620.n01.ctsm5.2.007
+Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310)
+Date: Fri 21 Jun 2024 09:42:26 AM MDT
+One-line Summary: Merge b4b-dev
+
+Purpose and description of changes
+----------------------------------
+
+ Fix long names #2512
+ Dust emissions moved to Object Oriented design #2552
+ Fix RXCROPMATURITY test #2599
+
+ NB: This is the first temporary tag (n01, see full tag name above) in
+ a series of temporary tags while we wait for the completion of the
+ beta01 cesm tag.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2512
+ Fixes #2552
+ Fixes #2599
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ Fix RXCROPMATURITY test #2599
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ----
+
+Answer changes
+--------------
+Changes answers relative to baseline: No
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2604
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.007
+Originator(s): jedwards4b (Jim Edwards, UCAR/CGD) slevis (Sam Levis, UCAR/CGD)
+Date: Fri 31 May 2024 13:49:29 MDT
+One-line Summary: Rm manage_externals and update documentation accordingly
+
+Purpose and description of changes
+----------------------------------
+#2443 Jim replaced this PR with the next one
+#2559 Remove manage externals
+#2564 Replace checkout_externals with git-fleximod in documentation
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2537 Remove manage_externals stuff from run_sys_tests
+Fixes #2536 Update documentation for git-fleximod
+Fixes #2467 Remove references to cheyenne from the testlist
+
+Notes of particular relevance for users
+---------------------------------------
+Caveats for users (e.g., need to interpolate initial conditions):
+ ./manage_externals/checkout_externals
+ is replaced with
+ ./bin/git-fleximod
+
+Changes to documentation:
+ Updated accordingly: references to checkout_externals were replaced with git-fleximod
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ [If python code has changed and you are NOT running aux_clm (e.g., because the only changes are in python
+ code) then also run the clm_pymods test suite; this is a small subset of aux_clm that runs the system
+ tests impacted by python changes. The best way to do this, if you expect no changes from the last tag in
+ either model output or namelists, is: create sym links pointing to the last tag's baseline directory,
+ named with the upcoming tag; then run the clm_pymods test suite comparing against these baselines but NOT
+ doing their own baseline generation. If you are already running the full aux_clm then you do NOT need to
+ separately run the clm_pymods test suite, and you can remove the following line.]
+
+ clm_pymods test suite on derecho -
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ---- OK
+
+Answer changes
+--------------
+Changes answers relative to baseline: No
+
+Other details
+-------------
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+ Comparing .gitmodules against Externals.cfg and Externals_CLM.cfg:
+ - cismwrap_2_2_001 from cismwrap_2_1_100
+ - cime6.0.246 from cime6.0.238_httpsbranch01
+ - cdeps1.0.34 from cdeps1.0.33
+
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2443
+ https://github.com/ESCOMP/ctsm/pull/2559
+ https://github.com/ESCOMP/ctsm/pull/2564
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.006
+Originator(s): slevis (Samuel Levis)
+Date: Tue 28 May 2024 03:14:18 PM MDT
+One-line Summary: Update externals to cesm2_3_beta17, remove mct, retire /test/tools
+
+Purpose and description of changes
+----------------------------------
+
+ #2493 update externals to beta17
+ #2294 remove mct but not in Externals.cfg
+ #2279 Retire the /test/tools framework for CESM test system custom tests that do the same thing
+
+ Changes unrelated to the tag's title:
+ #2546 fix error in cam4/cam5 test (ekluzek)
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2493 update externals to beta17
+ Fixes #2279 Retire the /test/tools framework for CESM test system custom tests that do the same thing
+ Fixes #2546 fix error in cam4/cam5 test (unrelated)
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to documentation:
+ Remove references to mct and cpl7
+
+Substantial timing or memory changes:
+ Not considered
+
+Testing summary:
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ---- OK
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO,
+ except for two derecho_nvhpc tests due to an update to the nvhpc compiler,
+ as documented in the Pull Request listed a few lines down.
+
+Other details
+-------------
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+ cism, ccs_config, cime, cmeps, cdeps
+
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2539
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.005
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Mon 13 May 2024 04:46:10 PM MDT
+One-line Summary: Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems
+
+Purpose and description of changes
+----------------------------------
+
+Fix the clm6_0 defaults that were incorrect in ctsm5.2.0. The use-cases needed to be changed to handle clm6_0,
+and clm6_0 wasn't handled in the merge of ctsm5.1.dev174 for the two new settings. Simplified the use-cases, which
+should help prevent these problems in the future. use_init_interp will be set for ctsm5.1 finidat files.
+
+Fix some testing: mksurfdata_esmf and the MKSURFDATAESMF test didn't work in a CESM checkout, and the build-namelist
+unit tests weren't going over the use-cases. Also started adding some tests to go over finidat files, but didn't yet
+capture all of them. Add some scripts to compare namelists in order to detect namelist issues for physics
+version updates and to more easily see namelist changes in one place (bld/unit_testers/cmp_baseline_lnd_in_files,
+bld/unit_testers/compare_namelists).
+
+Add some tests to detect issues in CESM and CAM testing (add more tests for cam6.0 forcing, add new IHistClm60BgcCropG
+compset). Add a NoAnthro compset to the ctsm_sci testing. Add I1850Clm60Sp test for f09 and f19 to ctsm_sci.
+
+Change NoAnthro compsets to use RTM rather than MOSART. Add science support to some clm6_0 compsets where it was obviously appropriate.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[X] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2492 -- Fix clm6_0 defaults
+ Fixes #2504 -- build-namelist unit tests for use-cases
+ Fixes #2519 -- MKSURFDATAESMF for CESM checkout
+ Fixes #2520 -- B1850 compsets are failing
+ One tick box in #2403 (new initial conditions)
+
+Notes of particular relevance for users
+---------------------------------------
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ Fixes: snow_thermal_cond_method, irrigate and snicar_snobc_intmix for clm6_0
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ New initial conditions for clm5_1/clm6_0 for 1850 from ne30pg3_g17 GSWP3v1 forcing for BgcCrop with ciso
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+ build-namelist unit tester started to add some finidat tests, but only some of them were added to the list
+
+Changes to tests or testing: Add some tests to detect the CESM test issues
+
+
+Testing summary: regular ctsm_sci
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS (737 tests compare different to baseline)
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ---- OK
+
+If the tag used for baseline comparisons was NOT the previous tag, note that here:
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes!
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: clm6_0 (some clm4_5 see below)
+ - what platforms/compilers: All
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ new climate so agrees with clm5_1 namelists, and fixes irrigate for clm4_5
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+
+ #2501 -- Fix clm6_0 defaults
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.004
+Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310)
+Date: Thu 09 May 2024 03:10:52 PM MDT
+One-line Summary: CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes
+
+Purpose and description of changes
+----------------------------------
+
+ Includes work by multiple people:
+ @slevis-lmwg: new fsurdat/landuse files and corresponding infrastructure for
+ the three so-called VR grids (ne0np4)
+ @samsrabin: a subset_data fix
+ @olyson: corrections to a couple of history long-names and updates to
+ history_fields_fates.rst and history_fields_nofates.rst
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+ Fixes #2487 VR grids
+ Fixes #2527 subset_data is broken
+ Fixes #2511 a hist long name is incorrect
+ Fixes #2506 a hist long name is incorrect
+
+Notes of particular relevance for users
+---------------------------------------
+Changes made to namelist defaults (e.g., changed parameter values):
+ The so-called VR grids now use 1979 fsurdat files for 1850 compsets
+ and 1979-2026 landuse for historical compsets. The latter cross over into
+ SSP years and use SSP2-4.5 for that.
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ Same comment.
+
+Changes to documentation:
+ See Purpose and Description above.
+
+Substantial timing or memory changes:
+ Not considered.
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ Updated the ctsm_sci tests for the three VR grids.
+
+Testing summary:
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ---- OK
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+ However, note that the ctsm_sci test-suite showed diffs in
+ - Fates cases as expected since the last available baseline was ctsm_sci-ctsm5.2.0.
+ - the preexisting VR grid test for 1850 as expected since the fsurdat changed to 1979.
+
+Other details
+-------------
+Pull Requests that document the changes (include PR ids):
+ https://github.com/ESCOMP/ctsm/pull/2590
+ https://github.com/ESCOMP/ctsm/pull/2512
+ https://github.com/ESCOMP/ctsm/pull/2528
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.003
+Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu)
+Date: Thu May 2 14:06:54 MDT 2024
+One-line Summary: Merge b4b-dev
+
+Purpose and description of changes
+----------------------------------
+
+Brings in 4 PRs from b4b-dev to master:
+- Regional CTSM Simulations and Capability of Creating Mesh Files (ESCOMP/CTSM#1892; Negin Sobhani and Adrianna Foster)
+- Add line about documentation in PR template (ESCOMP/CTSM#2488; Sam Rabin)
+- CTSM5.2 2000 fsurdat T42 64x128 file (ESCOMP/CTSM#2495; Sam Levis)
+- Move plumber2 scripts to python directory (ESCOMP/CTSM#2505; Teagan King)
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+
+List of CTSM issues fixed (include CTSM Issue # and description):
+- Resolves ESCOMP/CTSM#1513: Need a process to subset ESMF mesh files from global ones for regional grids
+- Resolves ESCOMP/CTSM#1773: High resolution regional simulations
+- Resolves ESCOMP/CTSM#2187: Move new PLUMBER2 scripts to python directory to enable python testing
+- Resolves ESCOMP/CTSM#2486: Temporarily add back a T42 dataset for CAM
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Changes to documentation:
+- Adds documentation for making mesh files
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Changes to tests or testing:
+- Adds testing for mesh-making Python scripts
+- Adds testing for plumber2_surf_wrapper
+
+
+Testing summary:
+----------------
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+- ESCOMP/CTSM#2513: Merge b4b-dev 2024-05-02
+- Constituent PRs:
+ - ESCOMP/CTSM#1892: Regional CTSM Simulations and Capability of Creating Mesh Files (https://github.com/ESCOMP/CTSM/pull/1892)
+ - ESCOMP/CTSM#2488: Add line about documentation in PR template (https://github.com/ESCOMP/CTSM/pull/2488)
+ - ESCOMP/CTSM#2495: CTSM5.2 2000 fsurdat T42 64x128 file (https://github.com/ESCOMP/CTSM/pull/2495)
+ - ESCOMP/CTSM#2505: Move plumber2 scripts to python directory (https://github.com/ESCOMP/CTSM/pull/2505)
+
+===============================================================
+===============================================================
+Tag name: ctsm5.2.002
+Originator(s): glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov)
+Date: Fri 26 Apr 2024 11:13:46 AM MDT
+One-line Summary: FATES default allometry parameter file update
+
+Purpose and description of changes
+----------------------------------
+
+This updates the default FATES parameter file which includes a number
+of changes:
+
+ - Default global tree pft allometry update
+ - New allometric mode options
+ - New scaling coefficients for alternative leaf maintenance respiration
+ - New switch to control the use of host land model day length scaling factor
+
+This also incorporates some testing additions and clean up, including:
+
+ - Removes cheyenne expected failure tests that have been converted to derecho
+ - Adds a 5x5_amazon test to aux_clm and the expected failures list
+ - Temporarily converts a fates 5x5_amazon test to an f10 test
+ - Adds a namelist check and corresponding unit test to make sure fates hydro
+ and fates satellite phenology mode cannot be used together
+
+The FATES externals tag is also updated; it includes a number of bug fixes
+and the addition of new history output.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ NOTE: FATES hydro mode and FATES satellite phenology mode cannot
+ be used in conjunction as of this API update
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ FATES parameter file default updated to fates_params_api.35.0.0_12pft_c240326.nc
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ A 5x5_amazon smoke test (not using MPI-serial) has been added to the test list
+ and to the list of expected failures to track issue #2423.
+ Out-of-date cheyenne tests on the expected failure list have been removed.
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--)
+
+ derecho ----- OK
+ izumi ------- OK
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes, only for FATES configurations
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: FATES
+ - what platforms/compilers: ALL
+ - nature of change (roundoff; larger than roundoff/same climate; new climate): larger than roundoff
+
+ The FATES externals update incorporates a number of bug fixes, and the new default allometry
+ parameters result in a new scientific baseline.
+
+
+Other details
+-------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+ FATES: sci.1.72.2_api.34.0.0 -> sci.1.73.0_api.35.0.0
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+
+#2436 -- FATES API35 parameter file update
+NGEET#1093 -- Update default allometry parameters for tree PFTs
+NGEET#1128 -- New allometric modes
+NGEET#1149 -- Alternative vertical scaling of leaf maintenance respiration
+NGEET#1161 -- Adding day length factor switch
+
+===============================================================
+===============================================================
Tag name: ctsm5.2.001
Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
Date: Mon 22 Apr 2024 02:10:55 PM MDT
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 2451783ec2..9c6524b33d 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,22 @@
Tag Who Date Summary
============================================================================================================================
+ ctsm5.2.015 multiple 07/22/2024 Update submodule tags to pass runoff from cism to rof
+ ctsm5.2.014 multiple 07/19/2024 use_matrixcn, use_soil_matrixcn come in as default .false.
+ ctsm5.2.013 glemieux 07/18/2024 FATES Land Use V2
+ ctsm5.2.012 sacks 07/16/2024 Relax tolerance for truncating small snocan values in CanopyFluxes
+ ctsm5.2.011 slevis 07/12/2024 Merge b4b-dev
+ ctsm5.2.010 multiple 07/11/2024 Explicit A/C adoption
+ ctsm5.2.009 erik 07/10/2024 Allow for CAM7 in lnd_tuning_mode and handle C or E in long compset names
+ ctsm5.2.008 erik 06/28/2024 Bring changes on temp-branch to master: b4b-dev, git-fleximod, hillslope fsat
+tmp-240620.n03.ctsm5.2.007 06/27/2024 Set upland hillslope column fsat values to zero (samrabin)
+tmp-240620.n02.ctsm5.2.007 06/21/2024 Another update of git-fleximod (erik)
+tmp-240620.n01.ctsm5.2.007 06/21/2024 Merge b4b-dev (slevis)
+ ctsm5.2.007 multiple 05/31/2024 Rm manage_externals and update documentation accordingly
+ ctsm5.2.006 slevis 05/28/2024 Update externals to cesm2_3_beta17, remove mct, retire /test/tools
+ ctsm5.2.005 erik 05/13/2024 Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems
+ ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes
+ ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev
+ ctsm5.2.002 glemieux 04/26/2024 FATES default allometry parameter file update
ctsm5.2.001 erik 04/22/2024 Merge b4b-dev
ctsm5.2.0 many 04/20/2024 New mksurfdata_esmf tool to create new surface datasets that are in place
ctsm5.1.dev176 afoster 04/04/2024 Merge b4b-dev
diff --git a/doc/README.CHECKLIST.master_tags b/doc/README.CHECKLIST.master_tags
index ed7794130b..51386c4238 100644
--- a/doc/README.CHECKLIST.master_tags
+++ b/doc/README.CHECKLIST.master_tags
@@ -14,9 +14,9 @@ https://github.com/ESCOMP/ctsm/wiki/CTSM-development-workflow
2a -- run 'git pull' to pull in the latest version from GitHub
2b -- run 'git status' and/or 'git diff' to make sure you don't have any uncommitted
local changes
- 2c -- run './manage_externals/checkout_externals -S' to make sure all externals are
- updated and don't have any uncommitted changes. (If any are marked with 's' in
- the first column, run ./manage_externals/checkout_externals to update them.)
+ 2c -- run './bin/git-fleximod status' to make sure all submodules are
+ updated and don't have any uncommitted changes. If any are marked with 's' in
+ the first column, run './bin/git-fleximod update'.
(3) Do all testing on your fork/feature-branch
@@ -26,7 +26,7 @@ https://github.com/ESCOMP/ctsm/wiki/CTSM-development-workflow
$EDITOR cime_config/testdefs/ExpectedTestFails.xml
3c -- make sure you understand any changes to the baselines -- to document in ChangeLog
3d -- Check the log file for run_sys_tests (../run_sys_test.log), to make sure that
- externals are correct (see 2c above)
+ submodules are correct (see 2c above)
(4) Use diff and status to make sure any new files are in the repo and only the correct
changes are on the branch
diff --git a/doc/build_docs b/doc/build_docs
index a8e8099b60..45c7099ec5 100755
--- a/doc/build_docs
+++ b/doc/build_docs
@@ -6,5 +6,5 @@ if [ -f doc-builder/build_docs ]; then
echo "Running: ./doc-builder/build_docs $@"
./doc-builder/build_docs "$@"
else
- echo "Obtain doc-builder by running ./manage_externals/checkout_externals -o from the top-level"
+ echo "Obtain doc-builder by running './bin/git-fleximod update --optional' from the top-level"
fi
diff --git a/doc/design/python_script_user_interface.rst b/doc/design/python_script_user_interface.rst
index 3ad6a4d2cf..d639117810 100644
--- a/doc/design/python_script_user_interface.rst
+++ b/doc/design/python_script_user_interface.rst
@@ -66,7 +66,7 @@ For logical flags, use a flag without an argument -- ``--feature`` for the case
We try to follow the guide at the top of `Python's logging howto `_. In particular, print statements should be used for "console output for ordinary usage of a command line script or program"; ``logger.info`` or ``logger.debug`` should be used to "report events that occur during normal operation of a program (e.g. for status monitoring or fault investigation)", etc.
-The distinction between when to use print and when to use logging can admittedly be a bit subjective, as it comes down to the question of whether the given output is part of the fundamental operation of the script – i.e., part of what the script is designed to do is to give this output. For example, ``run_sys_tests`` prints a variety of information when it starts, particularly concerning the git and manage_externals status of the current repository. The rationale for using ``print`` statements for this is that we designed ``run_sys_tests`` to replace some of the repetitive items that we did whenever running the system tests. One of these items was running ``git status`` and ``./manage_externals/checkout_externals -S -v`` to check that the repository is in a clean state. Thus, in this case, our view is that the output from these commands is part of the fundamental purpose of ``run_sys_tests``: it is something we always want to see, and we feel that it is important for anyone running the system tests to review, and thus ``print`` statements are appropriate here.
+The distinction between when to use print and when to use logging can admittedly be a bit subjective, as it comes down to the question of whether the given output is part of the fundamental operation of the script – i.e., part of what the script is designed to do is to give this output. For example, ``run_sys_tests`` prints a variety of information when it starts, particularly concerning the git and git-fleximod status of the current repository. The rationale for using ``print`` statements for this is that we designed ``run_sys_tests`` to replace some of the repetitive items that we did whenever running the system tests. One of these items was running ``git status`` and ``./bin/git-fleximod status --verbose`` to check that the repository is in a clean state. Thus, in this case, our view is that the output from these commands is part of the fundamental purpose of ``run_sys_tests``: it is something we always want to see, and we feel that it is important for anyone running the system tests to review, and thus ``print`` statements are appropriate here.
In general, ``print`` statements should be used sparingly, just for output that is important for the user to see. That said, users of CTSM scripts often expect more output than you would see from a typical Unix tool (where the philosophy is that there should be no output if everything worked correctly). Some examples of things that users of CTSM scripts typically want to see are:
@@ -81,3 +81,10 @@ More verbose output should go in ``logger.info`` or ``logger.debug`` statements
* e.g. You might want to output a ``logging.debug`` statement for every variable in a file you are editing.
Near the top of each python module where logging is used, there should be a line, ``logger = logging.getLogger(__name__)``. Then logging statements should be done using statements like ``logger.info(...)``, *not* ``logging.info(...)``: this allows more contextual information in logging output.
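+
+For example, a minimal sketch of this pattern (the module content and messages here are
+hypothetical)::
+
+    import logging
+
+    logger = logging.getLogger(__name__)
+
+    def create_dataset():
+        # Part of the script's fundamental output: always show it
+        print("Creating surface dataset")
+        # Status detail useful for fault investigation: route through the module logger
+        logger.info("finished reading input grid")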
+
+====================================================
+ Considerations on inclusion of python packages
+====================================================
+
+Since this is somewhat of an implementation detail, the discussion is in ``../../python/README.python_pkgs.rst``. The python
+packages used are at once an important part of the user interface, the tool design, and an implementation detail.
diff --git a/doc/doc-builder b/doc/doc-builder
new file mode 160000
index 0000000000..ab9bc93dd0
--- /dev/null
+++ b/doc/doc-builder
@@ -0,0 +1 @@
+Subproject commit ab9bc93dd09d0173f8097c7a18c7d061c1cd3b79
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 6c00f5a686..894e9b6c66 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -45,8 +45,8 @@
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ['.rst', '.md']
+#source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
diff --git a/doc/source/how-to-make-mesh.md b/doc/source/how-to-make-mesh.md
new file mode 100644
index 0000000000..0620598e05
--- /dev/null
+++ b/doc/source/how-to-make-mesh.md
@@ -0,0 +1,98 @@
+# Creating an ESMF mesh file from a netCDF file
+
+This gist includes instructions for creating and visualizing a mesh file from a netcdf file with valid 1D or 2D lat and lon coordinates.
+
+* An **ESMF mesh file**, aka the **Unstructured Grid File Format**, is a netcdf file that includes information about the grid's coordinates and their connectivity to each other.
+
+Additional information about ESMF mesh files is available [here](https://earthsystemmodeling.org/docs/release/ESMF_8_0_1/ESMF_refdoc/node3.html#SECTION03028200000000000000).
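+
+For reference, here is a minimal sketch of the header of such a file (the variable and dimension names follow the ESMF unstructured grid convention; the file name and sizes shown are made up):
+
+```
+$ ncdump -h my_mesh.nc
+netcdf my_mesh {
+dimensions:
+    nodeCount = 12 ;
+    elementCount = 6 ;
+    maxNodePElement = 4 ;
+    coordDim = 2 ;
+variables:
+    double nodeCoords(nodeCount, coordDim) ;
+    int elementConn(elementCount, maxNodePElement) ;
+    int numElementConn(elementCount) ;
+    double centerCoords(elementCount, coordDim) ;
+    int elementMask(elementCount) ;
+}
+```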
+
+------
+
+In this example, we will use `./mesh_maker.py` which uses `mesh_type.py` to create a mesh file and visualize it.
+
+1- First clone my fork and the branch that includes these capabilities:
+``` Shell
+git clone https://github.com/negin513/ctsm.git ctsm_mesh
+cd ctsm_mesh
+
+git checkout subset_mesh_dask
+```
+
+2- Next run mesh_maker.py for a netcdf file:
+
+```
+cd tools/site_and_regional
+```
+Check all the available options:
+
+```
+./mesh_maker.py --help
+```
+
+The output shows all available options for this script:
+```
+|------------------------------------------------------------------|
+|--------------------- Instructions -----------------------------|
+|------------------------------------------------------------------|
+This script creates ESMF unstructured GRID (mesh file) from a netcdf
+file with valid lats and lons. Provided lats and lons can be 1D or 2D.
+
+For example for running WRF-CTSM cases, the user can create a mesh
+file for their domain :
+ ./mesh_maker.py --input wrfinput_d01 --output my_region
+ --lat XLAT --lon XLONG --verbose
+
+optional arguments:
+ -h, --help show this help message and exit
+ --input INPUT Netcdf input file for creating ESMF mesh.
+ --output OUTPUT Name of the ESMF mesh created.
+ --outdir OUT_DIR Output directory (only if name of output mesh is not
+ defined)
+ --lat LAT_NAME Name of latitude varibale on netcdf input file. If none
+ given, looks to find variables that include 'lat'.
+ --lon LON_NAME Name of latitude varibale on netcdf input file. If none
+ given, looks to find variables that include 'lon'.
+ --mask MASK_NAME Name of mask varibale on netcdf input file. If none given,
+ create a fake mask with values of 1.
+ --area AREA_NAME Name of area variable on netcdf input file. If none given,
+ ESMF calculates element areas automatically.
+ --overwrite If meshfile exists, overwrite the meshfile.
+ -v, --verbose Increase output verbosity
+
+```
+
+Let's create a mesh file from a netcdf file with 1D lats and lons. In the sample file provided, the 1D lat and lon coordinates are saved in the `lsmlat` and `lsmlon` variables.
+
+```
+./mesh_maker.py --input /glade/scratch/negins/example_files/surfdata_4x5_hist_78pfts_CMIP6_simyr1850_275.0-330.0_-40-15_c220705.nc --output test_mesh_1d.nc --lat lsmlat --lon lsmlon --overwrite
+```
+The `--verbose` option also provides additional information for debugging.
+
+This script will create regional and global mesh plots. For example, for the above file, the plots are:
+test_mesh_1d_regional.png
+![image](https://user-images.githubusercontent.com/17344536/200441736-972a8136-5c05-4bc9-9bca-b498d972914a.png)
+
+
+test_mesh_1d_global.png
+
+![image](https://user-images.githubusercontent.com/17344536/200441753-d06e95d1-d85b-4216-9c23-d11ba89a31e4.png)
+
+
+
+------
+## Creating Mesh files for a WRF domain
+For running WRF-CTSM cases, we need to create ESMF mesh files for the WRF domain. We can create a mesh file from wrfinput (WRF initial condition) files. wrfinput has 2D coordinate information in the `XLAT` and `XLONG` variables.
+
+For example, let's create a mesh file from a WRF input file for a WRF-CTSM run.
+```
+./mesh_maker.py --input /glade/scratch/negins/example_files/wrfinput_d01 --output test_mesh_wrf.nc --lat XLAT --lon XLONG --overwrite
+```
+
+This produces a mesh file for running on our WRF domain.
+
+Here is how the regional plot looks for this mesh file:
+
+ ![image](https://user-images.githubusercontent.com/17344536/200442002-1ee5595c-9252-4934-a07c-2f6ad86aff1b.png)
+
+
+
\ No newline at end of file
diff --git a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst
index c0e510c017..fcd8235b62 100644
--- a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst
+++ b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst
@@ -26,12 +26,12 @@ Obtain CTSM by running::
git clone https://github.com/ESCOMP/CTSM.git
cd CTSM
- ./manage_externals/checkout_externals
+ ./bin/git-fleximod update
Then build CTSM and its dependencies. On a machine that has been ported to CIME, the
-command will look like this (example given for NCAR's ``cheyenne`` machine)::
+command will look like this (example given for NCAR's ``derecho`` machine)::
- ./lilac/build_ctsm /glade/scratch/$USER/ctsm_build_dir --machine cheyenne --compiler intel
+ ./lilac/build_ctsm /glade/scratch/$USER/ctsm_build_dir --machine derecho --compiler intel
and then, before building the atmosphere model::
@@ -91,7 +91,7 @@ Building CTSM requires:
- ESMF version 8 or later
- - **ESMF is not needed in general for CTSM, but is needed for LILAC**
+ - **ESMF is needed for LILAC (and for CESM3 and later)**
Obtaining CTSM
==============
@@ -101,11 +101,11 @@ above`) can be obtained with::
git clone https://github.com/ESCOMP/CTSM.git
cd CTSM
- ./manage_externals/checkout_externals
+ ./bin/git-fleximod update
By default, this will put you on the ``master`` branch of CTSM, which is the main
development branch. You can checkout a different branch or tag using ``git checkout``;
-**be sure to rerun** ``./manage_externals/checkout_externals`` **after doing so.**
+**be sure to rerun** ``./bin/git-fleximod update`` **after doing so.**
For more details, see
https://github.com/ESCOMP/CTSM/wiki/Quick-start-to-CTSM-development-with-git
@@ -145,14 +145,15 @@ the `CIME documentation`_.
Building on a CIME-supported machine
------------------------------------
-If you are using a machine that has been ported to CIME_ (for example, NCAR's ``cheyenne``
+If you are using a machine that has been ported to CIME_ (for example, NCAR's ``derecho``
machine), then you do not need to specify much information to ``build_ctsm``. In addition,
in this case, CIME will load the appropriate modules and set the appropriate environment
variables at build time, so you do not need to do anything to set up your environment
-ahead of time. **Building CTSM with LILAC requires ESMF. ESMF is currently an optional
-CIME dependency, so many CIME-ported machines do not provide information on an ESMF
-installation. NCAR's cheyenne machine DOES provide ESMF, but for other machines, you may
-need to add this to your CIME port.**
+ahead of time. **Building CTSM with LILAC requires ESMF. ESMF is an optional
+CIME dependency before CESM3 and not optional for CESM3.
+NCAR's derecho machine DOES provide ESMF. For other machines, you may
+need to add this to your CIME port. Please see esmf.org for download and build
+instructions.**
To build CTSM and its dependencies in this case, run::
@@ -231,7 +232,7 @@ Example usage for a Mac (a simple case) is::
./lilac/build_ctsm ~/ctsm_build_dir --os Darwin --compiler gnu --netcdf-path /usr/local --esmf-mkfile-path /Users/sacks/ESMF/esmf8.0.0/lib/libO/Darwin.gfortranclang.64.mpich3.default/esmf.mk --max-mpitasks-per-node 4 --no-pnetcdf
-Example usage for NCAR's ``cheyenne`` machine (a more complex case) is::
+Example usage for NCAR's ``derecho`` machine (a more complex case) is::
module purge
module load ncarenv/1.3 python/3.7.9 cmake intel/19.1.1 esmf_libs mkl
diff --git a/doc/source/lilac/specific-atm-models/wrf.rst b/doc/source/lilac/specific-atm-models/wrf.rst
index 5d104778ec..ad85fee777 100644
--- a/doc/source/lilac/specific-atm-models/wrf.rst
+++ b/doc/source/lilac/specific-atm-models/wrf.rst
@@ -44,7 +44,7 @@ Clone the CTSM repository::
git clone https://github.com/ESCOMP/CTSM.git
cd CTSM
- ./manage_externals/checkout_externals
+ ./bin/git-fleximod update
.. _build-CTSM-and-dependencies:
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
index f8b5fee002..e76a9cc6f0 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
@@ -638,7 +638,7 @@ The output to the above command is as follows:
-vichydro Turn VIC hydrologic parameterizations : [on | off] (default is off)
-crop Toggle for prognostic crop model. [on | off] (default is off)
(can ONLY be turned on when BGC type is CN or CNDV)
- -comp_intf Component interface to use (ESMF or MCT) (default MCT)
+ -comp_intf Component interface to use (default ESMF, currently no other option)
-defaults Specify full path to a configuration file which will be used
to supply defaults instead of the defaults in bld/config_files.
This file is used to specify model configuration parameters only.
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/history_fields_fates.rst b/doc/source/users_guide/setting-up-and-running-a-case/history_fields_fates.rst
index ec10de5080..84caf92465 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/history_fields_fates.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/history_fields_fates.rst
@@ -1,18 +1,18 @@
=============================
CTSM History Fields (fates)
=============================
-
+
CAUTION: Not all variables are relevant / present for all CTSM cases.
Key flags used in this CTSM case:
use_cn = F
use_crop = F
use_fates = T
-
-=================================== ================ ============================================================================================== ================================================================= =======
+
+=================================== ================ ============================================================================================== ================================================================= =======
CTSM History Fields
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Variable Name Level Dim. Long Description Units Active?
-=================================== ================ ============================================================================================== ================================================================= =======
+=================================== ================ ============================================================================================== ================================================================= =======
A5TMIN - 5-day running mean of min 2-m temperature K F
ACTUAL_IMMOB - actual N immobilization gN/m^2/s T
AGLB - Aboveground leaf biomass kg/m^2 F
@@ -22,13 +22,27 @@ ALTMAX - maximum annual active layer
ALTMAX_LASTYEAR - maximum prior year active layer thickness m F
ATM_O3 - atmospheric ozone partial pressure mol/mol F
ATM_TOPO - atmospheric surface height m T
+AZSUN - azimuth angle of the sun radians F
AnnET - Annual ET mm/s F
BCDEP - total BC deposition (dry+wet) from atmosphere kg/m^2/s T
+BCPHIDRY - black carbon deposition (phidry) from atmosphere kg/m^2/s F
+BCPHIWET - black carbon deposition (phiwet) from atmosphere kg/m^2/s F
+BCPHODRY - black carbon deposition (phodry) from atmosphere kg/m^2/s F
BTRAN - transpiration beta factor unitless T
BTRANMN - daily minimum of transpiration beta factor unitless T
+CH4PROD - Gridcell total production of CH4 gC/m2/s T
+CH4_EBUL_TOTAL_SAT - ebullition surface CH4 flux; (+ to atm) mol/m2/s F
+CH4_EBUL_TOTAL_UNSAT - ebullition surface CH4 flux; (+ to atm) mol/m2/s F
+CH4_SURF_AERE_SAT - aerenchyma surface CH4 flux for inundated area; (+ to atm) mol/m2/s T
+CH4_SURF_AERE_UNSAT - aerenchyma surface CH4 flux for non-inundated area; (+ to atm) mol/m2/s T
+CH4_SURF_DIFF_SAT - diffusive surface CH4 flux for inundated / lake area; (+ to atm) mol/m2/s T
+CH4_SURF_DIFF_UNSAT - diffusive surface CH4 flux for non-inundated area; (+ to atm) mol/m2/s T
+CH4_SURF_EBUL_SAT - ebullition surface CH4 flux for inundated / lake area; (+ to atm) mol/m2/s T
+CH4_SURF_EBUL_UNSAT - ebullition surface CH4 flux for non-inundated area; (+ to atm) mol/m2/s T
COL_CTRUNC - column-level sink for C truncation gC/m^2 F
COL_NTRUNC - column-level sink for N truncation gN/m^2 F
-COSZEN - cosine of solar zenith angle none F
+COSZEN - cosine of solar zenith angle (downscaled if downscaling is activated) none F
+COSZEN_GRC - cosine of solar zenith angle none F
CROPPROD1C - 1-yr crop product (grain+biofuel) C gC/m^2 T
CROPPROD1C_LOSS - loss from 1-yr crop product pool gC/m^2/s T
CROPPROD1N - 1-yr crop product (grain+biofuel) N gN/m^2 T
@@ -43,7 +57,15 @@ DPVLTRB3 - turbulent deposition veloci
DPVLTRB4 - turbulent deposition velocity 4 m/s F
DSL - dry surface layer thickness mm T
DSTDEP - total dust deposition (dry+wet) from atmosphere kg/m^2/s T
+DSTDRY1 - dust deposition (dry1) from atmosphere kg/m^2/s F
+DSTDRY2 - dust deposition (dry2) from atmosphere kg/m^2/s F
+DSTDRY3 - dust deposition (dry3) from atmosphere kg/m^2/s F
+DSTDRY4 - dust deposition (dry4) from atmosphere kg/m^2/s F
DSTFLXT - total surface dust emission kg/m2/s T
+DSTWET1 - dust deposition (wet1) from atmosphere kg/m^2/s F
+DSTWET2 - dust deposition (wet2) from atmosphere kg/m^2/s F
+DSTWET3 - dust deposition (wet3) from atmosphere kg/m^2/s F
+DSTWET4 - dust deposition (wet4) from atmosphere kg/m^2/s F
DWT_CROPPROD1C_GAIN - landcover change-driven addition to 1-year crop product pool gC/m^2/s T
DWT_CROPPROD1N_GAIN - landcover change-driven addition to 1-year crop product pool gN/m^2/s T
DWT_PROD100C_GAIN - landcover change-driven addition to 100-yr wood product pool gC/m^2/s F
@@ -52,6 +74,7 @@ DWT_PROD10C_GAIN - landcover change-driven add
DWT_PROD10N_GAIN - landcover change-driven addition to 10-yr wood product pool gN/m^2/s F
DWT_WOODPRODC_GAIN - landcover change-driven addition to wood product pools gC/m^2/s T
DWT_WOODPRODN_GAIN - landcover change-driven addition to wood product pools gN/m^2/s T
+DYN_COL_ADJUSTMENTS_CH4 - Adjustments in ch4 due to dynamic column areas; only makes sense at the column level: should n gC/m^2 F
DYN_COL_SOIL_ADJUSTMENTS_C - Adjustments in soil carbon due to dynamic column areas; only makes sense at the column level: gC/m^2 F
DYN_COL_SOIL_ADJUSTMENTS_N - Adjustments in soil nitrogen due to dynamic column areas; only makes sense at the column level gN/m^2 F
DYN_COL_SOIL_ADJUSTMENTS_NH4 - Adjustments in soil NH4 due to dynamic column areas; only makes sense at the column level: sho gN/m^2 F
@@ -95,10 +118,6 @@ FATES_DAYSINCE_COLDLEAFON - site-level days elapsed sin
FATES_DEMOTION_CARBONFLUX - demotion-associated biomass carbon flux from canopy to understory in kg carbon per m2 per seco kg m-2 s-1 T
FATES_DISTURBANCE_RATE_FIRE - disturbance rate from fire m2 m-2 yr-1 T
FATES_DISTURBANCE_RATE_LOGGING - disturbance rate from logging m2 m-2 yr-1 T
-FATES_DISTURBANCE_RATE_P2P - disturbance rate from primary to primary lands m2 m-2 yr-1 T
-FATES_DISTURBANCE_RATE_P2S - disturbance rate from primary to secondary lands m2 m-2 yr-1 T
-FATES_DISTURBANCE_RATE_POTENTIAL - potential (i.e., including unresolved) disturbance rate m2 m-2 yr-1 T
-FATES_DISTURBANCE_RATE_S2S - disturbance rate from secondary to secondary lands m2 m-2 yr-1 T
FATES_DISTURBANCE_RATE_TREEFALL - disturbance rate from treefall m2 m-2 yr-1 T
FATES_EFFECT_WSPEED - effective wind speed for fire spread in meters per second m s-1 T
FATES_EXCESS_RESP - respiration of un-allocatable carbon gain kg m-2 s-1 T
@@ -128,6 +147,7 @@ FATES_HARVEST_DEBT - Accumulated carbon failed t
FATES_HARVEST_DEBT_SEC - Accumulated carbon failed to be harvested from secondary patches kg C T
FATES_HET_RESP - heterotrophic respiration in kg carbon per m2 per second kg m-2 s-1 T
FATES_IGNITIONS - number of successful fire ignitions per m2 land area per second m-2 s-1 T
+FATES_L2FR - The leaf to fineroot biomass multiplier for target allometry kg kg-1 T
FATES_LAI - leaf area index per m2 land area m2 m-2 T
FATES_LAI_SECONDARY - leaf area index per m2 land area, secondary patches m2 m-2 T
FATES_LBLAYER_COND - mean leaf boundary layer conductance mol m-2 s-1 T
@@ -139,7 +159,7 @@ FATES_LITTER_OUT - litter flux out in kg carbo
FATES_LSTEMMAINTAR - live stem maintenance autotrophic respiration in kg carbon per m2 per second kg m-2 s-1 T
FATES_MAINT_RESP - maintenance respiration in kg carbon per m2 land area per second kg m-2 s-1 T
FATES_MAINT_RESP_SECONDARY - maintenance respiration in kg carbon per m2 land area per second, secondary patches kg m-2 s-1 T
-FATES_MAINT_RESP_UNREDUCED - diagnostic maintenance respiration if the low-carbon-storage reduction is ignored kg m-2 s-1 F
+FATES_MAINT_RESP_UNREDUCED - diagnostic maintenance respiration if the low-carbon-storage reduction is ignored kg m-2 s-1 T
FATES_MORTALITY_CFLUX_CANOPY - flux of biomass carbon from live to dead pools from mortality of canopy plants in kg carbon pe kg m-2 s-1 T
FATES_MORTALITY_CFLUX_USTORY - flux of biomass carbon from live to dead pools from mortality of understory plants in kg carbo kg m-2 s-1 T
FATES_NCHILLDAYS - site-level number of chill days days T
@@ -148,6 +168,7 @@ FATES_NCOHORTS_SECONDARY - total number of cohorts per
FATES_NCOLDDAYS - site-level number of cold days days T
FATES_NEP - net ecosystem production in kg carbon per m2 per second kg m-2 s-1 T
FATES_NESTEROV_INDEX - nesterov fire danger index T
+FATES_NIR_RAD_ERROR - mean two-stream solver error for NIR - T
FATES_NONSTRUCTC - non-structural biomass (sapwood + leaf + fineroot) in kg carbon per m2 kg m-2 T
FATES_NPATCHES - total number of patches per site T
FATES_NPATCHES_SECONDARY - total number of patches per site T
@@ -155,7 +176,6 @@ FATES_NPP - net primary production in k
FATES_NPP_SECONDARY - net primary production in kg carbon per m2 per second, secondary patches kg m-2 s-1 T
FATES_PRIMARY_PATCHFUSION_ERR - error in total primary lands associated with patch fusion m2 m-2 yr-1 T
FATES_PROMOTION_CARBONFLUX - promotion-associated biomass carbon flux from understory to canopy in kg carbon per m2 per sec kg m-2 s-1 T
-FATES_RAD_ERROR - radiation error in FATES RTM W m-2 T
FATES_REPROC - total biomass in live plant reproductive tissues in kg carbon per m2 kg m-2 T
FATES_ROS - fire rate of spread in meters per second m s-1 T
FATES_SAPWOODC - total biomass in live plant sapwood in kg carbon per m2 kg m-2 T
@@ -181,8 +201,12 @@ FATES_UNGERM_SEED_BANK - ungerminated seed mass of a
FATES_USTORY_VEGC - biomass of understory plants in kg carbon per m2 land area kg m-2 T
FATES_VEGC - total biomass in live plants in kg carbon per m2 land area kg m-2 T
FATES_VEGC_ABOVEGROUND - aboveground biomass in kg carbon per m2 land area kg m-2 T
+FATES_VIS_RAD_ERROR - mean two-stream solver error for VIS - T
FATES_WOOD_PRODUCT - total wood product from logging in kg carbon per m2 land area kg m-2 T
FCEV - canopy evaporation W/m^2 T
+FCH4 - Gridcell surface CH4 flux to atmosphere (+ to atm) kgC/m2/s T
+FCH4TOCO2 - Gridcell oxidation of CH4 to CO2 gC/m2/s T
+FCH4_DFSAT - CH4 additional flux due to changing fsat, natural vegetated and crop landunits only kgC/m2/s T
FCO2 - CO2 flux to atmosphere (+ to atm) kgCO2/m2/s F
FCOV - fractional impermeable area unitless T
FCTR - canopy transpiration W/m^2 T
@@ -194,6 +218,8 @@ FGR_R - Rural heat flux into soil/s
FGR_U - Urban heat flux into soil/snow including snow melt W/m^2 F
FH2OSFC - fraction of ground covered by surface water unitless T
FH2OSFC_NOSNOW - fraction of ground covered by surface water (if no snow present) unitless F
+FINUNDATED - fractional inundated area of vegetated columns unitless T
+FINUNDATED_LAG - time-lagged inundated fraction of vegetated columns unitless F
FIRA - net infrared (longwave) radiation W/m^2 T
FIRA_ICE - net infrared (longwave) radiation (ice landunits only) W/m^2 F
FIRA_R - Rural net infrared (longwave) radiation W/m^2 T
@@ -202,8 +228,9 @@ FIRE - emitted infrared (longwave)
FIRE_ICE - emitted infrared (longwave) radiation (ice landunits only) W/m^2 F
FIRE_R - Rural emitted infrared (longwave) radiation W/m^2 T
FIRE_U - Urban emitted infrared (longwave) radiation W/m^2 F
-FLDS - atmospheric longwave radiation (downscaled to columns in glacier regions) W/m^2 T
-FLDS_ICE - atmospheric longwave radiation (downscaled to columns in glacier regions) (ice landunits only) W/m^2 F
+FLDS - atmospheric longwave radiation (downscaled for glacier and hillslope columns) W/m^2 T
+FLDS_ICE - atmospheric longwave radiation (downscaled for glacier and hillslope columns) (ice landunits o W/m^2 F
+FLDS_NOT_DOWNSCALED - atmospheric longwave radiation (pre-downscaling) W/m^2 F
FPG - fraction of potential gpp proportion T
FPI - fraction of potential immobilization proportion T
FROST_TABLE - frost table depth (natural vegetated and crop landunits only) m F
@@ -214,7 +241,7 @@ FSA_R - Rural absorbed solar radiat
FSA_U - Urban absorbed solar radiation W/m^2 F
FSD24                               -                direct radiation (last 24hrs)                                                                W/m^2                                                             F
FSD240                              -                direct radiation (last 240hrs)                                                               W/m^2                                                             F
-FSDS - atmospheric incident solar radiation W/m^2 T
+FSDS - atmospheric incident solar radiation (downscaled for glacier and hillslope columns) W/m^2 T
FSDSND - direct nir incident solar radiation W/m^2 T
FSDSNDLN - direct nir incident solar radiation at local noon W/m^2 T
FSDSNI - diffuse nir incident solar radiation W/m^2 T
@@ -222,6 +249,7 @@ FSDSVD - direct vis incident solar r
FSDSVDLN - direct vis incident solar radiation at local noon W/m^2 T
FSDSVI - diffuse vis incident solar radiation W/m^2 T
FSDSVILN - diffuse vis incident solar radiation at local noon W/m^2 T
+FSDS_from_atm - atmospheric incident solar radiation received from atmosphere (pre-downscaling) W/m^2 T
FSH - sensible heat not including correction for land use change and rain/snow conversion W/m^2 T
FSH_G - sensible heat from ground W/m^2 T
FSH_ICE - sensible heat not including correction for land use change and rain/snow conversion (ice landu W/m^2 F
@@ -320,7 +348,7 @@ LIT_MET_N - LIT_MET N
LIT_MET_N_1m - LIT_MET N to 1 meter gN/m^2 F
LIT_MET_N_TO_SOM_ACT_N - decomp. of metabolic litter N to active soil organic N gN/m^2 F
LNC - leaf N concentration gN leaf/m^2 T
-LWdown - atmospheric longwave radiation (downscaled to columns in glacier regions) W/m^2 F
+LWdown - atmospheric longwave radiation (downscaled for glacier and hillslope columns) W/m^2 F
LWup - upwelling longwave radiation W/m^2 F
MORTALITY_CROWNAREA_CANOPY - Crown area of canopy trees that died m2/ha/year T
MORTALITY_CROWNAREA_UNDERSTORY      -                Crown area of understory trees that died                                                     m2/ha/year                                                        T
@@ -337,13 +365,19 @@ M_SOM_PAS_N_TO_LEACHING - passive soil organic N leac
M_SOM_SLO_C_TO_LEACHING             -                slow soil organic matter C leaching loss                                                     gC/m^2/s                                                          F
M_SOM_SLO_N_TO_LEACHING             -                slow soil organic matter N leaching loss                                                     gN/m^2/s                                                          F
NDEP_TO_SMINN - atmospheric N deposition to soil mineral N gN/m^2/s T
+NEM - Gridcell net adjustment to net carbon exchange passed to atm. for methane production gC/m2/s T
NET_NMIN - net rate of N mineralization gN/m^2/s T
NFIX_TO_SMINN - symbiotic/asymbiotic N fixation to soil mineral N gN/m^2/s T
NSUBSTEPS - number of adaptive timesteps in CLM timestep unitless F
OBU - Monin-Obukhov length m F
OCDEP - total OC deposition (dry+wet) from atmosphere kg/m^2/s T
+OCPHIDRY - organic carbon deposition (phidry) from atmosphere kg/m^2/s F
+OCPHIWET - organic carbon deposition (phiwet) from atmosphere kg/m^2/s F
+OCPHODRY                            -                organic carbon deposition (phodry) from atmosphere                                           kg/m^2/s                                                          F
PARVEGLN - absorbed par by vegetation at local noon W/m^2 T
-PBOT - atmospheric pressure at surface (downscaled to columns in glacier regions) Pa T
+PBOT - atmospheric pressure at surface (downscaled for glacier and hillslope columns) Pa T
+PBOT_NOT_DOWNSCALED - atmospheric pressure at surface (pre-downscaling) Pa F
+PCH4 - atmospheric partial pressure of CH4 Pa T
PCO2 - atmospheric partial pressure of CO2 Pa T
POTENTIAL_IMMOB - potential N immobilization gN/m^2/s T
POT_F_DENIT - potential denitrification flux gN/m^2/s T
@@ -356,10 +390,11 @@ PROD10C - 10-yr wood product C
PROD10C_LOSS - loss from 10-yr wood product pool gC/m^2/s F
PROD10N - 10-yr wood product N gN/m^2 F
PROD10N_LOSS - loss from 10-yr wood product pool gN/m^2/s F
-PSurf - atmospheric pressure at surface (downscaled to columns in glacier regions) Pa F
+PSurf - atmospheric pressure at surface (downscaled for glacier and hillslope columns) Pa F
Q2M - 2m specific humidity kg/kg T
QAF - canopy air humidity kg/kg F
QBOT - atmospheric specific humidity (downscaled to columns in glacier regions) kg/kg T
+QBOT_NOT_DOWNSCALED - atmospheric specific humidity (pre-downscaling) kg/kg F
QDIRECT_THROUGHFALL - direct throughfall of liquid (rain + above-canopy irrigation) mm/s F
QDIRECT_THROUGHFALL_SNOW - direct throughfall of snow mm/s F
QDRAI - sub-surface drainage mm/s T
@@ -396,6 +431,7 @@ QIRRIG_FROM_GW_UNCONFINED - water added through unconfi
QIRRIG_FROM_SURFACE - water added through surface water irrigation mm/s T
QIRRIG_SPRINKLER - water added via sprinkler irrigation mm/s F
QOVER - total surface runoff (includes QH2OSFC) mm/s T
+QOVER_LAG - time-lagged surface runoff for soil columns mm/s F
QPHSNEG - net negative hydraulic redistribution flux mm/s F
QRGWL - surface runoff at glaciers (liquid only), wetlands, lakes; also includes melted ice runoff fro mm/s T
QRUNOFF - total liquid runoff not including correction for land use change mm/s T
@@ -436,7 +472,7 @@ RAW2 - aerodynamical resistance
RB - leaf boundary resistance s/m F
RH - atmospheric relative humidity % F
RH2M - 2m relative humidity % T
-RH2M_R - Rural 2m specific humidity % F
+RH2M_R - Rural 2m relative humidity % F
RH2M_U - Urban 2m relative humidity % F
RHAF - fractional humidity of canopy air fraction F
RH_LEAF - fractional humidity at leaf surface fraction F
@@ -444,6 +480,7 @@ RSCANOPY - canopy resistance
RSSHA - shaded leaf stomatal resistance s/m T
RSSUN - sunlit leaf stomatal resistance s/m T
Rainf - atmospheric rain, after rain/snow repartitioning based on temperature mm/s F
+Rho_from_atm - atmospheric density (pre-downscaling) kg/m^3 F
Rnet - net radiation W/m^2 F
SABG - solar rad absorbed by ground W/m^2 T
SABG_PEN - Rural solar rad penetrating top soil or snow layer watt/m^2 T
@@ -546,21 +583,24 @@ T10 - 10-day running mean of 2-m
TAF - canopy air temperature K F
TAUX - zonal surface stress kg/m/s^2 T
TAUY - meridional surface stress kg/m/s^2 T
-TBOT - atmospheric air temperature (downscaled to columns in glacier regions) K T
+TBOT - atmospheric air temperature (downscaled for glacier and hillslope columns) K T
TBUILD - internal urban building air temperature K T
TBUILD_MAX - prescribed maximum interior building temperature K F
+TDEPTH - tributary water depth m F
+TDEPTHMAX - tributary bankfull water depth m F
TFLOOR - floor temperature K F
TG - ground temperature K T
TG_ICE - ground temperature (ice landunits only) K F
TG_R - Rural ground temperature K F
TG_U - Urban ground temperature K F
TH2OSFC - surface water temperature K T
-THBOT - atmospheric air potential temperature (downscaled to columns in glacier regions) K T
+THBOT - atmospheric air potential temperature (downscaled for glacier and hillslope columns) K T
TKE1 - top lake level eddy thermal conductivity W/(mK) T
TLAI - total projected leaf area index m^2/m^2 T
TOPO_COL - column-level topographic height m F
TOPO_COL_ICE - column-level topographic height (ice landunits only) m F
TOTCOLC - total column carbon, incl veg and cpool but excl product pools gC/m^2 T
+TOTCOLCH4 - total belowground CH4 (0 for non-lake special landunits in the absence of dynamic landunits) gC/m2 T
TOTCOLN - total column-level N, excluding product pools gN/m^2 T
TOTECOSYSC - total ecosystem carbon, incl veg but excl cpool and product pools gC/m^2 T
TOTECOSYSN - total ecosystem N, excluding product pools gN/m^2 T
@@ -600,8 +640,9 @@ TV - vegetation temperature
TV24 - vegetation temperature (last 24hrs) K F
TV240 - vegetation temperature (last 240hrs) K F
TWS - total water storage mm T
-Tair - atmospheric air temperature (downscaled to columns in glacier regions) K F
+Tair - atmospheric air temperature (downscaled for glacier and hillslope columns) K F
Tair_from_atm - atmospheric air temperature received from atmosphere (pre-downscaling) K F
+Thair_from_atm - atmospheric air potential temperature (pre-downscaling) K F
U10 - 10-m wind m/s T
U10_DUST - 10-m wind for dust model m/s T
U10_ICE - 10-m wind (ice landunits only) m/s F
@@ -611,6 +652,7 @@ URBAN_AC - urban air conditioning flux
URBAN_HEAT - urban heating flux W/m^2 T
USTAR                               -                friction velocity                                                                             m/s                                                               F
UST_LAKE - friction velocity (lakes only) m/s F
+UWIND - atmospheric U wind velocity magnitude m/s F
VA - atmospheric wind speed plus convective velocity m/s F
VENTILATION - sensible heat flux from building ventilation W/m^2 T
VOLR - river channel total water storage m3 T
@@ -618,11 +660,13 @@ VOLRMCH - river channel main channel
VPD - vpd Pa F
VPD2M - 2m vapor pressure deficit Pa T
VPD_CAN - canopy vapor pressure deficit kPa T
+VWIND - atmospheric V wind velocity magnitude m/s F
WASTEHEAT - sensible heat flux from heating/cooling sources of urban waste heat W/m^2 T
WBT - 2 m Stull Wet Bulb C T
WBT_R - Rural 2 m Stull Wet Bulb C T
WBT_U - Urban 2 m Stull Wet Bulb C T
WIND - atmospheric wind velocity magnitude m/s T
+WTGQ - surface tracer conductance m/s T
Wind - atmospheric wind velocity magnitude m/s F
Z0HG - roughness length over ground, sensible heat (vegetated landunits only) m F
Z0MG - roughness length over ground, momentum (vegetated landunits only) m F
@@ -633,6 +677,7 @@ ZBOT - atmospheric reference heigh
ZETA - dimensionless stability parameter unitless F
ZII - convective boundary height m F
ZWT - water table depth (natural vegetated and crop landunits only) m T
+ZWT_CH4_UNSAT - depth of water table for methane production used in non-inundated area m T
ZWT_PERCH - perched water table depth (natural vegetated and crop landunits only) m T
num_iter - number of iterations unitless F
QICE_FORC elevclas qice forcing sent to GLC mm/s F
@@ -660,39 +705,28 @@ FATES_SCORCH_HEIGHT_APPF fates_levagepft SPITFIRE flame Scorch Heigh
FATES_VEGC_APPF fates_levagepft biomass per PFT in each age bin in kg carbon per m2 kg m-2 F
FATES_MORTALITY_AGESCEN_AC fates_levcacls age senescence mortality by cohort age in number of plants per m2 per year m-2 yr-1 T
FATES_NPLANT_AC fates_levcacls number of plants per m2 by cohort age class m-2 T
-FATES_CROWNAREA_CL fates_levcan total crown area in each canopy layer m2 m-2 T
-FATES_FABD_SHA_TOPLF_CL fates_levcan shade fraction of direct light absorbed by the top leaf layer of each canopy layer 1 F
-FATES_FABD_SUN_TOPLF_CL fates_levcan sun fraction of direct light absorbed by the top leaf layer of each canopy layer 1 F
-FATES_FABI_SHA_TOPLF_CL fates_levcan shade fraction of indirect light absorbed by the top leaf layer of each canopy layer 1 F
-FATES_FABI_SUN_TOPLF_CL fates_levcan sun fraction of indirect light absorbed by the top leaf layer of each canopy layer 1 F
-FATES_LAISHA_TOP_CL fates_levcan LAI in the shade by the top leaf layer of each canopy layer m2 m-2 F
-FATES_LAISUN_TOP_CL fates_levcan LAI in the sun by the top leaf layer of each canopy layer m2 m-2 F
-FATES_PARSHA_Z_CL fates_levcan PAR absorbed in the shade by top leaf layer in each canopy layer W m-2 F
-FATES_PARSUN_Z_CL fates_levcan PAR absorbed in the sun by top leaf layer in each canopy layer W m-2 F
+FATES_CROWNAREA_CL fates_levcan area fraction of the canopy footprint occupied by each canopy-leaf layer m2 m-2 T
+FATES_LAISHA_CL fates_levcan LAI of shaded leaves by canopy layer m2 m-2 F
+FATES_LAISUN_CL fates_levcan LAI of sunlit leaves by canopy layer m2 m-2 F
+FATES_PARSHA_CL fates_levcan PAR absorbed by shaded leaves in each canopy layer W m-2 F
+FATES_PARSUN_CL fates_levcan PAR absorbed by sunlit leaves in each canopy layer W m-2 F
FATES_MORTALITY_AGESCEN_ACPF fates_levcapf age senescence mortality by pft/cohort age in number of plants per m2 per year m-2 yr-1 F
FATES_NPLANT_ACPF fates_levcapf stem number density by pft and age class m-2 F
-FATES_CROWNAREA_CLLL fates_levcnlf total crown area that is occupied by leaves in each canopy and leaf layer m2 m-2 F
-FATES_FABD_SHA_CLLL fates_levcnlf shade fraction of direct light absorbed by each canopy and leaf layer 1 F
-FATES_FABD_SUN_CLLL fates_levcnlf sun fraction of direct light absorbed by each canopy and leaf layer 1 F
-FATES_FABI_SHA_CLLL fates_levcnlf shade fraction of indirect light absorbed by each canopy and leaf layer 1 F
-FATES_FABI_SUN_CLLL fates_levcnlf sun fraction of indirect light absorbed by each canopy and leaf layer 1 F
-FATES_LAISHA_Z_CLLL fates_levcnlf LAI in the shade by each canopy and leaf layer m2 m-2 F
-FATES_LAISUN_Z_CLLL fates_levcnlf LAI in the sun by each canopy and leaf layer m2 m-2 F
+FATES_CROWNAREA_CLLL fates_levcnlf area fraction of the total ground occupied by each canopy-leaf layer m2 m-2 F
+FATES_LAISHA_CLLL fates_levcnlf LAI in the shade by each canopy and leaf layer m2 m-2 F
+FATES_LAISUN_CLLL fates_levcnlf LAI in the sun by each canopy and leaf layer m2 m-2 F
FATES_NET_C_UPTAKE_CLLL fates_levcnlf net carbon uptake in kg carbon per m2 per second by each canopy and leaf layer per unit ground kg m-2 s-1 F
FATES_PARPROF_DIF_CLLL fates_levcnlf radiative profile of diffuse PAR through each canopy and leaf layer (averaged across PFTs) W m-2 F
FATES_PARPROF_DIR_CLLL fates_levcnlf radiative profile of direct PAR through each canopy and leaf layer (averaged across PFTs) W m-2 F
-FATES_PARSHA_Z_CLLL fates_levcnlf PAR absorbed in the shade by each canopy and leaf layer W m-2 F
-FATES_PARSUN_Z_CLLL fates_levcnlf PAR absorbed in the sun by each canopy and leaf layer W m-2 F
-FATES_FABD_SHA_CLLLPF fates_levcnlfpf shade fraction of direct light absorbed by each canopy, leaf, and PFT 1 F
-FATES_FABD_SUN_CLLLPF fates_levcnlfpf sun fraction of direct light absorbed by each canopy, leaf, and PFT 1 F
-FATES_FABI_SHA_CLLLPF fates_levcnlfpf shade fraction of indirect light absorbed by each canopy, leaf, and PFT 1 F
-FATES_FABI_SUN_CLLLPF fates_levcnlfpf sun fraction of indirect light absorbed by each canopy, leaf, and PFT 1 F
-FATES_LAISHA_Z_CLLLPF fates_levcnlfpf LAI in the shade by each canopy, leaf, and PFT m2 m-2 F
-FATES_LAISUN_Z_CLLLPF fates_levcnlfpf LAI in the sun by each canopy, leaf, and PFT m2 m-2 F
+FATES_PARSHA_CLLL fates_levcnlf PAR absorbed in the shade by each canopy and leaf layer W m-2 F
+FATES_PARSUN_CLLL fates_levcnlf PAR absorbed in the sun by each canopy and leaf layer W m-2 F
+FATES_CROWNFRAC_CLLLPF fates_levcnlfpf area fraction of the canopy footprint occupied by each canopy-leaf-pft layer m2 m-2 F
+FATES_LAISHA_CLLLPF fates_levcnlfpf Shaded leaf area by each canopy, leaf, and PFT m2 m-2 F
+FATES_LAISUN_CLLLPF fates_levcnlfpf Sunlit leaf area by each canopy, leaf, and PFT m2 m-2 F
FATES_PARPROF_DIF_CLLLPF fates_levcnlfpf radiative profile of diffuse PAR through each canopy, leaf, and PFT W m-2 F
FATES_PARPROF_DIR_CLLLPF fates_levcnlfpf radiative profile of direct PAR through each canopy, leaf, and PFT W m-2 F
-FATES_PARSHA_Z_CLLLPF fates_levcnlfpf PAR absorbed in the shade by each canopy, leaf, and PFT W m-2 F
-FATES_PARSUN_Z_CLLLPF fates_levcnlfpf PAR absorbed in the sun by each canopy, leaf, and PFT W m-2 F
+FATES_PARSHA_CLLLPF fates_levcnlfpf PAR absorbed in the shade by each canopy, leaf, and PFT W m-2 F
+FATES_PARSUN_CLLLPF fates_levcnlfpf PAR absorbed in the sun by each canopy, leaf, and PFT W m-2 F
FATES_CWD_ABOVEGROUND_DC fates_levcwdsc debris class-level aboveground coarse woody debris stocks in kg carbon per m2 kg m-2 F
FATES_CWD_ABOVEGROUND_IN_DC fates_levcwdsc debris class-level aboveground coarse woody debris input in kg carbon per m2 per second kg m-2 s-1 F
FATES_CWD_ABOVEGROUND_OUT_DC fates_levcwdsc debris class-level aboveground coarse woody debris output in kg carbon per m2 per second kg m-2 s-1 F
@@ -718,6 +752,8 @@ FATES_FUEL_BURNT_BURNFRAC_FC fates_levfuel product of fraction (0-1) o
FATES_FUEL_MOISTURE_FC fates_levfuel spitfire fuel class-level fuel moisture (volumetric) m3 m-3 T
FATES_CANOPYAREA_HT fates_levheight canopy area height distribution m2 m-2 T
FATES_LEAFAREA_HT fates_levheight leaf area height distribution m2 m-2 T
+FATES_PATCHAREA_LU fates_levlanduse patch area by land use type m2 m-2 T
+FATES_DISTURBANCE_RATE_MATRIX_LULU fates_levlulu disturbance rates by land use type x land use type matrix m2 m-2 yr-1 T
FATES_CANOPYCROWNAREA_PF fates_levpft total PFT-level canopy-layer crown area per m2 land area m2 m-2 T
FATES_CROWNAREA_PF fates_levpft total PFT-level crown area per m2 land area m2 m-2 T
FATES_DAYSINCE_DROUGHTLEAFOFF_PF fates_levpft PFT-level days elapsed since drought leaf drop days T
@@ -726,14 +762,17 @@ FATES_DROUGHT_STATUS_PF fates_levpft PFT-level drought status, <
FATES_ELONG_FACTOR_PF fates_levpft PFT-level mean elongation factor (partial flushing/abscission) 1 T
FATES_GPP_PF fates_levpft total PFT-level GPP in kg carbon per m2 land area per second kg m-2 s-1 T
FATES_GPP_SE_PF fates_levpft total PFT-level GPP in kg carbon per m2 land area per second, secondary patches kg m-2 s-1 T
+FATES_L2FR_CANOPY_REC_PF fates_levpft The leaf to fineroot biomass multiplier for recruits (canopy) kg kg-1 T
+FATES_L2FR_USTORY_REC_PF fates_levpft The leaf to fineroot biomass multiplier for recruits (understory) kg kg-1 T
FATES_LEAFC_PF fates_levpft total PFT-level leaf biomass in kg carbon per m2 land area kg m-2 T
FATES_MEANLIQVOL_DROUGHTPHEN_PF     fates_levpft     PFT-level mean liquid water volume for drought phenology                                     m3 m-3                                                            T
FATES_MEANSMP_DROUGHTPHEN_PF fates_levpft PFT-level mean soil matric potential for drought phenology Pa T
FATES_MORTALITY_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from mortality kg m-2 s-1 T
-FATES_MORTALITY_CSTARV_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from carbon starvation mortality kg m-2 s-1 T
+FATES_MORTALITY_CSTARV_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from carbon starvation mortality (both kg m-2 s-1 T
FATES_MORTALITY_FIRE_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from fire mortality kg m-2 s-1 T
FATES_MORTALITY_HYDRO_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from hydraulic failure mortality kg m-2 s-1 T
FATES_MORTALITY_PF fates_levpft PFT-level mortality rate in number of individuals per m2 land area per year m-2 yr-1 T
+FATES_MORT_CSTARV_CONT_CFLUX_PF fates_levpft PFT-level flux of biomass carbon from live to dead pool from carbon starvation mortality (Cont kg m-2 s-1 T
FATES_NPLANT_PF fates_levpft total PFT-level number of individuals per m2 land area m-2 T
FATES_NPLANT_SEC_PF fates_levpft total PFT-level number of individuals per m2 land area, secondary patches m-2 T
FATES_NPP_PF fates_levpft total PFT-level NPP in kg carbon per m2 land area per second kg m-2 s-1 T
@@ -776,8 +815,8 @@ FATES_LEAF_ALLOC_CANOPY_SZ fates_levscls allocation to leaves for ca
FATES_LEAF_ALLOC_USTORY_SZ fates_levscls allocation to leaves for understory plants by size class in kg carbon per m2 per second kg m-2 s-1 F
FATES_LSTEMMAINTAR_CANOPY_SZ fates_levscls live stem maintenance autotrophic respiration for canopy plants in kg carbon per m2 per second kg m-2 s-1 F
FATES_LSTEMMAINTAR_USTORY_SZ fates_levscls live stem maintenance autotrophic respiration for understory plants in kg carbon per m2 per se kg m-2 s-1 F
-FATES_M3_MORTALITY_CANOPY_SZ fates_levscls C starvation mortality of canopy plants by size N/ha/yr F
-FATES_M3_MORTALITY_USTORY_SZ fates_levscls C starvation mortality of understory plants by size N/ha/yr F
+FATES_M3_MORTALITY_CANOPY_SZ fates_levscls C starvation mortality of canopy plants by size m-2 yr-1 F
+FATES_M3_MORTALITY_USTORY_SZ fates_levscls C starvation mortality of understory plants by size m-2 yr-1 F
FATES_MAINTAR_CANOPY_SZ fates_levscls maintenance autotrophic respiration of canopy plants in kg carbon per m2 per second by size kg m-2 s-1 F
FATES_MAINTAR_USTORY_SZ fates_levscls maintenance autotrophic respiration of understory plants in kg carbon per m2 per second by siz kg m-2 s-1 F
FATES_MORTALITY_AGESCEN_SE_SZ fates_levscls age senescence mortality by size in number of plants per m2 per year, secondary patches m-2 yr-1 T
@@ -787,7 +826,7 @@ FATES_MORTALITY_BACKGROUND_SZ fates_levscls background mortality by siz
FATES_MORTALITY_CANOPY_SE_SZ fates_levscls total mortality of canopy trees by size class in number of plants per m2, secondary patches m-2 yr-1 T
FATES_MORTALITY_CANOPY_SZ fates_levscls total mortality of canopy trees by size class in number of plants per m2 m-2 yr-1 T
FATES_MORTALITY_CSTARV_SE_SZ fates_levscls carbon starvation mortality by size in number of plants per m2 per year, secondary patches m-2 yr-1 T
-FATES_MORTALITY_CSTARV_SZ fates_levscls carbon starvation mortality by size in number of plants per m2 per year m-2 yr-1 T
+FATES_MORTALITY_CSTARV_SZ           fates_levscls    carbon starvation mortality by size in number of plants per m2 per year (both continuous and te m-2 yr-1                                                          T
FATES_MORTALITY_FIRE_SZ fates_levscls fire mortality by size in number of plants per m2 per year m-2 yr-1 T
FATES_MORTALITY_FREEZING_SE_SZ fates_levscls freezing mortality by size in number of plants per m2 per event, secondary patches m-2 event-1 T
FATES_MORTALITY_FREEZING_SZ fates_levscls freezing mortality by size in number of plants per m2 per year m-2 yr-1 T
@@ -798,7 +837,7 @@ FATES_MORTALITY_LOGGING_SE_SZ fates_levscls logging mortality by size i
FATES_MORTALITY_LOGGING_SZ fates_levscls logging mortality by size in number of plants per m2 per year m-2 yr-1 T
FATES_MORTALITY_SENESCENCE_SE_SZ fates_levscls senescence mortality by size in number of plants per m2 per event, secondary patches m-2 yr-1 T
FATES_MORTALITY_SENESCENCE_SZ fates_levscls senescence mortality by size in number of plants per m2 per year m-2 yr-1 T
-FATES_MORTALITY_TERMINATION_SZ fates_levscls termination mortality by size in number of plants per m2 per year m-2 yr-1 T
+FATES_MORTALITY_TERMINATION_SZ fates_levscls termination mortality (excluding C-starvation) by size in number of plants per m2 per year m-2 yr-1 T
FATES_MORTALITY_USTORY_SZ fates_levscls total mortality of understory trees by size class in individuals per m2 per year m-2 yr-1 T
FATES_NPLANT_CANOPY_SZ fates_levscls number of canopy plants per m2 by size class m-2 T
FATES_NPLANT_SZ fates_levscls number of plants per m2 by size class m-2 T
@@ -845,6 +884,8 @@ FATES_BGSAPMAINTAR_SZPF fates_levscpf below-ground sapwood mainte
FATES_BGSAPWOOD_ALLOC_SZPF fates_levscpf allocation to below-ground sapwood by pft/size in kg carbon per m2 per second kg m-2 s-1 F
FATES_BGSTRUCT_ALLOC_SZPF fates_levscpf allocation to below-ground structural (deadwood) by pft/size in kg carbon per m2 per second kg m-2 s-1 F
FATES_C13DISC_SZPF fates_levscpf C13 discrimination by pft/size per mil F
+FATES_CROWNAREA_CANOPY_SZPF fates_levscpf Total crown area of canopy plants by pft/size m2 m-2 F
+FATES_CROWNAREA_USTORY_SZPF fates_levscpf Total crown area of understory plants by pft/size m2 m-2 F
FATES_DDBH_CANOPY_SZPF fates_levscpf diameter growth increment by pft/size m m-2 yr-1 F
FATES_DDBH_SZPF fates_levscpf diameter growth increment by pft/size m m-2 yr-1 F
FATES_DDBH_USTORY_SZPF fates_levscpf diameter growth increment by pft/size m m-2 yr-1 F
@@ -863,22 +904,22 @@ FATES_LEAFC_CANOPY_SZPF fates_levscpf biomass in leaves of canopy
FATES_LEAFC_SZPF fates_levscpf leaf carbon mass by size-class x pft in kg carbon per m2 kg m-2 F
FATES_LEAFC_USTORY_SZPF fates_levscpf biomass in leaves of understory plants by pft/size in kg carbon per m2 kg m-2 F
FATES_LEAF_ALLOC_SZPF fates_levscpf allocation to leaves by pft/size in kg carbon per m2 per second kg m-2 s-1 F
-FATES_M3_MORTALITY_CANOPY_SZPF fates_levscpf C starvation mortality of canopy plants by pft/size N/ha/yr F
-FATES_M3_MORTALITY_USTORY_SZPF fates_levscpf C starvation mortality of understory plants by pft/size N/ha/yr F
+FATES_M3_MORTALITY_CANOPY_SZPF fates_levscpf C starvation mortality of canopy plants by pft/size m-2 yr-1 F
+FATES_M3_MORTALITY_USTORY_SZPF fates_levscpf C starvation mortality of understory plants by pft/size m-2 yr-1 F
FATES_MAINTAR_SZPF fates_levscpf maintenance autotrophic respiration in kg carbon per m2 per second by pft/size kg m-2 s-1 F
FATES_MORTALITY_AGESCEN_SZPF fates_levscpf age senescence mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_BACKGROUND_SZPF fates_levscpf background mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_CAMBIALBURN_SZPF fates_levscpf fire mortality from cambial burn by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_CANOPY_SZPF fates_levscpf total mortality of canopy plants by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_CROWNSCORCH_SZPF fates_levscpf fire mortality from crown scorch by pft/size in number of plants per m2 per year m-2 yr-1 F
-FATES_MORTALITY_CSTARV_SZPF fates_levscpf carbon starvation mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
+FATES_MORTALITY_CSTARV_SZPF         fates_levscpf    carbon starvation mortality by pft/size in number of plants per m2 per year (both continuous an m-2 yr-1                                                          F
FATES_MORTALITY_FIRE_SZPF fates_levscpf fire mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_FREEZING_SZPF fates_levscpf freezing mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_HYDRAULIC_SZPF fates_levscpf hydraulic mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_IMPACT_SZPF fates_levscpf impact mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_LOGGING_SZPF fates_levscpf logging mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_MORTALITY_SENESCENCE_SZPF fates_levscpf senescence mortality by pft/size in number of plants per m2 per year m-2 yr-1 F
-FATES_MORTALITY_TERMINATION_SZPF fates_levscpf termination mortality by pft/size in number pf plants per m2 per year m-2 yr-1 F
+FATES_MORTALITY_TERMINATION_SZPF    fates_levscpf    termination mortality (excluding C-starvation) by pft/size in number of plants per m2 per year  m-2 yr-1                                                          F
FATES_MORTALITY_USTORY_SZPF fates_levscpf total mortality of understory plants by pft/size in number of plants per m2 per year m-2 yr-1 F
FATES_NPLANT_CANOPY_SZPF fates_levscpf number of canopy plants by size/pft per m2 m-2 F
FATES_NPLANT_SZPF fates_levscpf stem number density by pft/size m-2 F
@@ -1008,8 +1049,11 @@ ratio_k1 levdcmp ratio_k1
ratio_no3_co2 levdcmp ratio_no3_co2 ratio F
soil_bulkdensity levdcmp soil_bulkdensity kg/m3 F
soil_co2_prod levdcmp soil_co2_prod ug C / g soil / day F
+CONC_CH4_SAT levgrnd CH4 soil Concentration for inundated / lake area mol/m3 F
+CONC_CH4_UNSAT levgrnd CH4 soil Concentration for non-inundated area mol/m3 F
FGR_SOIL_R levgrnd Rural downward heat flux at interface below each soil layer watt/m^2 F
HK levgrnd hydraulic conductivity (natural vegetated and crop landunits only) mm/s F
+O2_DECOMP_DEPTH_UNSAT levgrnd O2 consumption from HR and AR for non-inundated area mol/m3/s F
SMP levgrnd soil matric potential (natural vegetated and crop landunits only) mm T
SOILPSI levgrnd soil water potential in each soil layer MPa F
TSOI levgrnd soil temperature (natural vegetated and crop landunits only) K T
@@ -1035,6 +1079,8 @@ SNO_TK_ICE levsno Thermal conductivity (ice l
SNO_T_ICE levsno Snow temperatures (ice landunits only) K F
SNO_Z levsno Snow layer thicknesses m F
SNO_Z_ICE levsno Snow layer thicknesses (ice landunits only) m F
+CONC_O2_SAT levsoi O2 soil Concentration for inundated / lake area mol/m3 T
+CONC_O2_UNSAT levsoi O2 soil Concentration for non-inundated area mol/m3 T
FATES_FRAGMENTATION_SCALER_SL levsoi factor (0-1) by which litter/cwd fragmentation proceeds relative to max rate by soil layer T
FATES_FROOTC_SL levsoi Total carbon in live plant fine-roots over depth kg m-3 T
H2OSOI levsoi volumetric soil water (natural vegetated and crop landunits only) mm3/mm3 T
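
In the table above, the Active? column marks whether a field is written to the default history tape; fields flagged F exist but must be requested explicitly. As a minimal sketch (hist_fincl2, hist_nhtfrq, and hist_mfilt are standard CTSM history namelist settings; the tape layout and field selection here are illustrative, using only names from the table):

    ! user_nl_clm fragment: put some inactive CH4/runoff diagnostics
    ! on a second, monthly history tape
    hist_fincl2 = 'FINUNDATED_LAG', 'QOVER_LAG', 'DSTWET1', 'CONC_CH4_UNSAT'
    hist_nhtfrq = 0, 0     ! tapes 1 and 2 both hold monthly averages
    hist_mfilt  = 1, 12    ! twelve monthly samples per tape-2 file
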
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/history_fields_nofates.rst b/doc/source/users_guide/setting-up-and-running-a-case/history_fields_nofates.rst
index 89e7dd23fc..67868f75b1 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/history_fields_nofates.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/history_fields_nofates.rst
@@ -8,11 +8,11 @@ use_cn = T
use_crop = T
use_fates = F
-=================================== ================ ============================================================================================== ================================================================= =======
+=================================== ================ ============================================================================================== ================================================================= =======
CTSM History Fields
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Variable Name Level Dim. Long Description Units Active?
-=================================== ================ ============================================================================================== ================================================================= =======
+=================================== ================ ============================================================================================== ================================================================= =======
A10TMIN - 10-day running mean of min 2-m temperature K F
A5TMIN - 5-day running mean of min 2-m temperature K F
ACTUAL_IMMOB - actual N immobilization gN/m^2/s T
@@ -36,10 +36,14 @@ ATM_O3 - atmospheric ozone partial p
ATM_TOPO - atmospheric surface height m T
AVAILC - C flux available for allocation gC/m^2/s F
AVAIL_RETRANSN - N flux available from retranslocation pool gN/m^2/s F
+AZSUN - azimuth angle of the sun radians F
AnnET - Annual ET mm/s F
BAF_CROP - fractional area burned for crop s-1 T
BAF_PEATF - fractional area burned in peatland s-1 T
BCDEP - total BC deposition (dry+wet) from atmosphere kg/m^2/s T
+BCPHIDRY - black carbon deposition (phidry) from atmosphere kg/m^2/s F
+BCPHIWET - black carbon deposition (phiwet) from atmosphere kg/m^2/s F
+BCPHODRY - black carbon deposition (phodry) from atmosphere kg/m^2/s F
BETA - coefficient of convective velocity none F
BGLFR - background litterfall rate 1/s F
BGNPP - belowground NPP gC/m^2/s T
@@ -66,7 +70,8 @@ COL_NTRUNC - column-level sink for N tru
COST_NACTIVE - Cost of active uptake gN/gC T
COST_NFIX - Cost of fixation gN/gC T
COST_NRETRANS - Cost of retranslocation gN/gC T
-COSZEN - cosine of solar zenith angle none F
+COSZEN - cosine of solar zenith angle (downscaled if downscaling is activated) none F
+COSZEN_GRC - cosine of solar zenith angle none F
CPHASE - crop phenology phase 0-not planted, 1-planted, 2-leaf emerge, 3-grain fill, 4-harvest T
CPOOL - temporary photosynthate C pool gC/m^2 T
CPOOL_DEADCROOT_GR - dead coarse root growth respiration gC/m^2/s F
@@ -158,7 +163,15 @@ DPVLTRB3 - turbulent deposition veloci
DPVLTRB4 - turbulent deposition velocity 4 m/s F
DSL - dry surface layer thickness mm T
DSTDEP - total dust deposition (dry+wet) from atmosphere kg/m^2/s T
+DSTDRY1 - dust deposition (dry1) from atmosphere kg/m^2/s F
+DSTDRY2 - dust deposition (dry2) from atmosphere kg/m^2/s F
+DSTDRY3 - dust deposition (dry3) from atmosphere kg/m^2/s F
+DSTDRY4 - dust deposition (dry4) from atmosphere kg/m^2/s F
DSTFLXT - total surface dust emission kg/m2/s T
+DSTWET1 - dust deposition (wet1) from atmosphere kg/m^2/s F
+DSTWET2 - dust deposition (wet2) from atmosphere kg/m^2/s F
+DSTWET3 - dust deposition (wet3) from atmosphere kg/m^2/s F
+DSTWET4 - dust deposition (wet4) from atmosphere kg/m^2/s F
DT_VEG - change in t_veg, last iteration K F
DWT_CONV_CFLUX - conversion C flux (immediate loss to atm) (0 at all times except first timestep of year) gC/m^2/s T
DWT_CONV_CFLUX_DRIBBLED - conversion C flux (immediate loss to atm), dribbled throughout the year gC/m^2/s T
@@ -246,8 +259,9 @@ FIRE - emitted infrared (longwave)
FIRE_ICE - emitted infrared (longwave) radiation (ice landunits only) W/m^2 F
FIRE_R - Rural emitted infrared (longwave) radiation W/m^2 T
FIRE_U - Urban emitted infrared (longwave) radiation W/m^2 F
-FLDS - atmospheric longwave radiation (downscaled to columns in glacier regions) W/m^2 T
-FLDS_ICE - atmospheric longwave radiation (downscaled to columns in glacier regions) (ice landunits only) W/m^2 F
+FLDS - atmospheric longwave radiation (downscaled for glacier and hillslope columns) W/m^2 T
+FLDS_ICE - atmospheric longwave radiation (downscaled for glacier and hillslope columns) (ice landunits o W/m^2 F
+FLDS_NOT_DOWNSCALED - atmospheric longwave radiation (pre-downscaling) W/m^2 F
FPI - fraction of potential immobilization proportion T
FPSN - photosynthesis umol m-2 s-1 T
FPSN24 - 24 hour accumulative patch photosynthesis starting from mid-night umol CO2/m^2 ground/day F
@@ -278,7 +292,7 @@ FSA_R - Rural absorbed solar radiat
FSA_U - Urban absorbed solar radiation W/m^2 F
FSD24                               -                direct radiation (last 24hrs)                                                                W/m^2                                                             F
FSD240                              -                direct radiation (last 240hrs)                                                               W/m^2                                                             F
-FSDS - atmospheric incident solar radiation W/m^2 T
+FSDS - atmospheric incident solar radiation (downscaled for glacier and hillslope columns) W/m^2 T
FSDSND - direct nir incident solar radiation W/m^2 T
FSDSNDLN - direct nir incident solar radiation at local noon W/m^2 T
FSDSNI - diffuse nir incident solar radiation W/m^2 T
@@ -286,6 +300,7 @@ FSDSVD - direct vis incident solar r
FSDSVDLN - direct vis incident solar radiation at local noon W/m^2 T
FSDSVI - diffuse vis incident solar radiation W/m^2 T
FSDSVILN - diffuse vis incident solar radiation at local noon W/m^2 T
+FSDS_from_atm - atmospheric incident solar radiation received from atmosphere (pre-downscaling) W/m^2 T
FSH - sensible heat not including correction for land use change and rain/snow conversion W/m^2 T
FSH_G - sensible heat from ground W/m^2 T
FSH_ICE - sensible heat not including correction for land use change and rain/snow conversion (ice landu W/m^2 F
@@ -426,6 +441,7 @@ LEAFC_STORAGE_XFER_ACC - Accumulated leaf C transfer
LEAFC_TO_BIOFUELC - leaf C to biofuel C gC/m^2/s T
LEAFC_TO_LITTER - leaf C litterfall gC/m^2/s F
LEAFC_TO_LITTER_FUN - leaf C litterfall used by FUN gC/m^2/s T
+LEAFC_TO_REMOVEDRESIDUEC - leaf C to removed residue C gC/m^2/s F
LEAFC_XFER - leaf C transfer gC/m^2 F
LEAFC_XFER_TO_LEAFC - leaf C growth from storage gC/m^2/s F
LEAFN - leaf N gN/m^2 T
@@ -487,6 +503,7 @@ LIVESTEMC_STORAGE - live stem C storage
LIVESTEMC_STORAGE_TO_XFER - live stem C shift storage to transfer gC/m^2/s F
LIVESTEMC_TO_BIOFUELC - livestem C to biofuel C gC/m^2/s T
LIVESTEMC_TO_DEADSTEMC - live stem C turnover gC/m^2/s F
+LIVESTEMC_TO_REMOVEDRESIDUEC - livestem C to removed residue C gC/m^2/s F
LIVESTEMC_XFER - live stem C transfer gC/m^2 F
LIVESTEMC_XFER_TO_LIVESTEMC - live stem C growth from storage gC/m^2/s F
LIVESTEMN - live stem N gN/m^2 T
@@ -498,7 +515,7 @@ LIVESTEMN_XFER - live stem N transfer
LIVESTEMN_XFER_TO_LIVESTEMN - live stem N growth from storage gN/m^2/s F
LIVESTEM_MR - live stem maintenance respiration gC/m^2/s F
LNC - leaf N concentration gN leaf/m^2 T
-LWdown - atmospheric longwave radiation (downscaled to columns in glacier regions) W/m^2 F
+LWdown - atmospheric longwave radiation (downscaled for glacier and hillslope columns) W/m^2 F
LWup - upwelling longwave radiation W/m^2 F
MEG_acetaldehyde - MEGAN flux kg/m2/sec T
MEG_acetic_acid - MEGAN flux kg/m2/sec T
@@ -700,6 +717,9 @@ NUPTAKE_NPP_FRACTION - frac of NPP used in N uptak
N_ALLOMETRY - N allocation index none F
OBU - Monin-Obukhov length m F
OCDEP - total OC deposition (dry+wet) from atmosphere kg/m^2/s T
+OCPHIDRY - organic carbon deposition (phidry) from atmosphere kg/m^2/s F
+OCPHIWET - organic carbon deposition (phiwet) from atmosphere kg/m^2/s F
+OCPHODRY                            -                organic carbon deposition (phodry) from atmosphere                                           kg/m^2/s                                                          F
OFFSET_COUNTER - offset days counter days F
OFFSET_FDD - offset freezing degree days counter C degree-days F
OFFSET_FLAG - offset flag none F
@@ -719,8 +739,9 @@ PAR24_sun - sunlit PAR (24 hrs)
PARVEGLN - absorbed par by vegetation at local noon W/m^2 T
PAR_shade - shade PAR umol/m2/s F
PAR_sun - sunlit PAR umol/m2/s F
-PBOT - atmospheric pressure at surface (downscaled to columns in glacier regions) Pa T
+PBOT - atmospheric pressure at surface (downscaled for glacier and hillslope columns) Pa T
PBOT_240 - 10 day running mean of air pressure Pa F
+PBOT_NOT_DOWNSCALED - atmospheric pressure at surface (pre-downscaling) Pa F
PCH4 - atmospheric partial pressure of CH4 Pa T
PCO2 - atmospheric partial pressure of CO2 Pa T
PCO2_240 - 10 day running mean of CO2 pressure Pa F
@@ -754,10 +775,11 @@ PSNSHA - shaded leaf photosynthesis
PSNSHADE_TO_CPOOL - C fixation from shaded canopy gC/m^2/s T
PSNSUN - sunlit leaf photosynthesis umolCO2/m^2/s T
PSNSUN_TO_CPOOL - C fixation from sunlit canopy gC/m^2/s T
-PSurf - atmospheric pressure at surface (downscaled to columns in glacier regions) Pa F
+PSurf - atmospheric pressure at surface (downscaled for glacier and hillslope columns) Pa F
Q2M - 2m specific humidity kg/kg T
QAF - canopy air humidity kg/kg F
QBOT - atmospheric specific humidity (downscaled to columns in glacier regions) kg/kg T
+QBOT_NOT_DOWNSCALED - atmospheric specific humidity (pre-downscaling) kg/kg F
QDIRECT_THROUGHFALL - direct throughfall of liquid (rain + above-canopy irrigation) mm/s F
QDIRECT_THROUGHFALL_SNOW - direct throughfall of snow mm/s F
QDRAI - sub-surface drainage mm/s T
@@ -839,7 +861,7 @@ RETRANSN - plant pool of retranslocate
RETRANSN_TO_NPOOL - deployment of retranslocated N gN/m^2/s T
RH - atmospheric relative humidity % F
RH2M - 2m relative humidity % T
-RH2M_R - Rural 2m specific humidity % F
+RH2M_R - Rural 2m relative humidity % F
RH2M_U - Urban 2m relative humidity % F
RH30 - 30-day running mean of relative humidity % F
RHAF - fractional humidity of canopy air fraction F
@@ -849,6 +871,7 @@ RR - root respiration (fine root
RSSHA - shaded leaf stomatal resistance s/m T
RSSUN - sunlit leaf stomatal resistance s/m T
Rainf - atmospheric rain, after rain/snow repartitioning based on temperature mm/s F
+Rho_from_atm - atmospheric density (pre-downscaling) kg/m^3 F
Rnet - net radiation W/m^2 F
SABG - solar rad absorbed by ground W/m^2 T
SABG_PEN - Rural solar rad penetrating top soil or snow layer watt/m^2 T
@@ -977,9 +1000,11 @@ T10 - 10-day running mean of 2-m
TAF - canopy air temperature K F
TAUX - zonal surface stress kg/m/s^2 T
TAUY - meridional surface stress kg/m/s^2 T
-TBOT - atmospheric air temperature (downscaled to columns in glacier regions) K T
+TBOT - atmospheric air temperature (downscaled for glacier and hillslope columns) K T
TBUILD - internal urban building air temperature K T
TBUILD_MAX - prescribed maximum interior building temperature K F
+TDEPTH - tributary water depth m F
+TDEPTHMAX - tributary bankfull water depth m F
TEMPAVG_T2M - temporary average 2m air temperature K F
TEMPMAX_RETRANSN - temporary annual max of retranslocated N pool gN/m^2 F
TEMPSUM_POTENTIAL_GPP - temporary annual sum of potential GPP gC/m^2/yr F
@@ -992,7 +1017,7 @@ TG_ICE - ground temperature (ice lan
TG_R - Rural ground temperature K F
TG_U - Urban ground temperature K F
TH2OSFC - surface water temperature K T
-THBOT - atmospheric air potential temperature (downscaled to columns in glacier regions) K T
+THBOT - atmospheric air potential temperature (downscaled for glacier and hillslope columns) K T
THIC - 2 m Temp Hum Index Comfort C T
THIC_R - Rural 2 m Temp Hum Index Comfort C T
THIC_U - Urban 2 m Temp Hum Index Comfort C T
@@ -1060,8 +1085,9 @@ TV240 - vegetation temperature (las
TVEGD10 - 10 day running mean of patch daytime vegetation temperature Kelvin F
TVEGN10 - 10 day running mean of patch night-time vegetation temperature Kelvin F
TWS - total water storage mm T
-Tair - atmospheric air temperature (downscaled to columns in glacier regions) K F
+Tair - atmospheric air temperature (downscaled for glacier and hillslope columns) K F
Tair_from_atm - atmospheric air temperature received from atmosphere (pre-downscaling) K F
+Thair_from_atm - atmospheric air potential temperature (pre-downscaling) K F
U10 - 10-m wind m/s T
U10_DUST - 10-m wind for dust model m/s T
U10_ICE - 10-m wind (ice landunits only) m/s F
@@ -1072,6 +1098,7 @@ URBAN_AC - urban air conditioning flux
URBAN_HEAT - urban heating flux W/m^2 T
USTAR                               -                friction velocity                                                                             m/s                                                               F
UST_LAKE - friction velocity (lakes only) m/s F
+UWIND - atmospheric U wind velocity magnitude m/s F
VA - atmospheric wind speed plus convective velocity m/s F
VCMX25T - canopy profile of vcmax25 umol/m2/s T
VENTILATION - sensible heat flux from building ventilation W/m^2 T
@@ -1081,6 +1108,7 @@ VOLRMCH - river channel main channel
VPD - vpd Pa F
VPD2M - 2m vapor pressure deficit Pa T
VPD_CAN - canopy vapor pressure deficit kPa T
+VWIND - atmospheric V wind velocity magnitude m/s F
Vcmx25Z - canopy profile of vcmax25 predicted by LUNA model umol/m2/s T
WASTEHEAT - sensible heat flux from heating/cooling sources of urban waste heat W/m^2 T
WBA - 2 m Wet Bulb C T
@@ -1261,7 +1289,7 @@ STEM_PROF levdcmp profile for litter C and N
SUPPLEMENT_TO_SMINN_vr levdcmp supplemental N supply gN/m^3/s F
WFPS levdcmp WFPS percent F
anaerobic_frac levdcmp anaerobic_frac m3/m3 F
-diffus levdcmp diffusivity (from nitrification-denitrification) m^2/s F
+diffus levdcmp diffusivity m^2/s F
fr_WFPS levdcmp fr_WFPS fraction F
n2_n2o_ratio_denit levdcmp n2_n2o_ratio_denit gN/gN F
r_psi levdcmp r_psi m F
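
Both history-field tables gain *_NOT_DOWNSCALED and *_from_atm counterparts that record the forcing as received from the atmosphere, before it is downscaled to glacier and hillslope columns. A hedged user_nl_clm sketch for writing a downscaled field alongside its pre-downscaling counterpart (field names come from the tables above; the daily tape-3 settings are illustrative):

    ! user_nl_clm fragment: compare downscaled vs. pre-downscaling forcing
    hist_fincl3 = 'FLDS', 'FLDS_NOT_DOWNSCALED', 'PBOT', 'PBOT_NOT_DOWNSCALED'
    hist_nhtfrq = 0, 0, -24   ! tape 3 written as daily means
    hist_mfilt  = 1, 12, 30   ! one month of daily samples per tape-3 file
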
diff --git a/doc/source/users_guide/testing/testing.rst b/doc/source/users_guide/testing/testing.rst
index bad1183fff..69ca1f7263 100644
--- a/doc/source/users_guide/testing/testing.rst
+++ b/doc/source/users_guide/testing/testing.rst
@@ -13,12 +13,6 @@ CIME Testing scripts
We first introduce the test scripts that work for all CESM components. The CIME script **create_test** runs a specific type of test, at a given resolution, for a given compset using a given machine. See `CIME Chapter on Testing `_ for how to use it to run single tests as well as lists of tests. The standard testname for CLM is "aux_clm" for cheyenne with intel and gnu compilers as well as the CGD machine hobart for intel, nag, and pgi compilers. There's also a shorter test list called "clm_short". Also see the `CTSM Wiki on Testing `_.
-CTSM Tools Testing
-==================
-
-.. include:: ../../../../test/tools/README
- :literal:
-
CTSM Fortran Unit Tests
=======================
diff --git a/libraries/mct b/libraries/mct
new file mode 160000
index 0000000000..82b0071e69
--- /dev/null
+++ b/libraries/mct
@@ -0,0 +1 @@
+Subproject commit 82b0071e69d14330b75d23b0bc68543ebea9aadc
diff --git a/libraries/parallelio b/libraries/parallelio
new file mode 160000
index 0000000000..f52ade0756
--- /dev/null
+++ b/libraries/parallelio
@@ -0,0 +1 @@
+Subproject commit f52ade075619b32fa141993b5665b0fe099befc2
diff --git a/lilac/bld_templates/mosart_in b/lilac/bld_templates/mosart_in
index 091ec69285..0bc2242dda 100644
--- a/lilac/bld_templates/mosart_in
+++ b/lilac/bld_templates/mosart_in
@@ -4,18 +4,18 @@
delt_mosart = 1800
do_rtm = .true.
do_rtmflood = .false.
- finidat_rtm = " "
- frivinp_rtm = "/glade/campaign/cesm/cesmdata/cseg/inputdata/rof/mosart/MOSART_routing_Global_0.5x0.5_c170601.nc"
+ finidat = " "
+ frivinp = "/glade/campaign/cesm/cesmdata/cseg/inputdata/rof/mosart/MOSART_routing_Global_0.5x0.5_c170601.nc"
ice_runoff = .true.
qgwl_runoff_option = "threshold"
- rtmhist_fexcl1 = ""
- rtmhist_fexcl2 = ""
- rtmhist_fexcl3 = ""
- rtmhist_fincl1 = ""
- rtmhist_fincl2 = ""
- rtmhist_fincl3 = ""
- rtmhist_mfilt = 1
- rtmhist_ndens = 1
- rtmhist_nhtfrq = 0
+ fexcl1 = ""
+ fexcl2 = ""
+ fexcl3 = ""
+ fincl1 = ""
+ fincl2 = ""
+ fincl3 = ""
+ mfilt = 1
+ ndens = 1
+ nhtfrq = 0
smat_option = "Xonly"
/
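
The rename above drops the rtm decorations from the MOSART namelist: finidat_rtm becomes finidat, frivinp_rtm becomes frivinp, and the rtmhist_ prefix disappears from the history settings. A hedged sketch of a user_nl_mosart fragment written against the renamed variables (old names noted in comments; the values are illustrative):

    finidat = " "    ! formerly finidat_rtm
    fincl1  = ""     ! formerly rtmhist_fincl1
    mfilt   = 12     ! formerly rtmhist_mfilt
    nhtfrq  = 0      ! formerly rtmhist_nhtfrq
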
diff --git a/manage_externals/.dir_locals.el b/manage_externals/.dir_locals.el
deleted file mode 100644
index a370490e92..0000000000
--- a/manage_externals/.dir_locals.el
+++ /dev/null
@@ -1,12 +0,0 @@
-; -*- mode: Lisp -*-
-
-((python-mode
- . (
- ;; fill the paragraph to 80 columns when using M-q
- (fill-column . 80)
-
- ;; Use 4 spaces to indent in Python
- (python-indent-offset . 4)
- (indent-tabs-mode . nil)
- )))
-
diff --git a/manage_externals/.github/ISSUE_TEMPLATE.md b/manage_externals/.github/ISSUE_TEMPLATE.md
deleted file mode 100644
index 8ecb2ae64b..0000000000
--- a/manage_externals/.github/ISSUE_TEMPLATE.md
+++ /dev/null
@@ -1,6 +0,0 @@
-### Summary of Issue:
-### Expected behavior and actual behavior:
-### Steps to reproduce the problem (should include model description file(s) or link to public repository):
-### What is the changeset ID of the code, and the machine you are using:
-### have you modified the code? If so, it must be committed and available for testing:
-### Screen output or log file showing the error message and context:
diff --git a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md b/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
deleted file mode 100644
index b68b1fb5e2..0000000000
--- a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
+++ /dev/null
@@ -1,17 +0,0 @@
-[ 50 character, one line summary ]
-
-[ Description of the changes in this commit. It should be enough
- information for someone not following this development to understand.
- Lines should be wrapped at about 72 characters. ]
-
-User interface changes?: [ No/Yes ]
-[ If yes, describe what changed, and steps taken to ensure backward compatibilty ]
-
-Fixes: [Github issue #s] And brief description of each issue.
-
-Testing:
- test removed:
- unit tests:
- system tests:
- manual testing:
-
diff --git a/manage_externals/.github/workflows/bumpversion.yml b/manage_externals/.github/workflows/bumpversion.yml
deleted file mode 100644
index f4dc9b7ca5..0000000000
--- a/manage_externals/.github/workflows/bumpversion.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: Bump version
-on:
- push:
- branches:
- - main
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Bump version and push tag
- id: tag_version
- uses: mathieudutour/github-tag-action@v5.5
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- create_annotated_tag: true
- default_bump: patch
- dry_run: false
- tag_prefix: manic-
diff --git a/manage_externals/.github/workflows/tests.yml b/manage_externals/.github/workflows/tests.yml
deleted file mode 100644
index dd75b91b49..0000000000
--- a/manage_externals/.github/workflows/tests.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-# This is a workflow to compile the cmeps source without cime
-name: Test Manic
-
-# Controls when the action will run. Triggers the workflow on push or pull request
-# events but only for the master branch
-on:
- push:
- branches: [ main ]
- pull_request:
- branches: [ main ]
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
- test-manic:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Test Manic
- run: |
- pushd test
- git config --global user.email "devnull@example.com"
- git config --global user.name "GITHUB tester"
- git config --global protocol.file.allow always
- make utest
- make stest
- popd
-
- - name: Setup tmate session
- if: ${{ failure() }}
- uses: mxschmitt/action-tmate@v3
diff --git a/manage_externals/.gitignore b/manage_externals/.gitignore
deleted file mode 100644
index a71ac0cd75..0000000000
--- a/manage_externals/.gitignore
+++ /dev/null
@@ -1,17 +0,0 @@
-# directories that are checked out by the tool
-cime/
-cime_config/
-components/
-
-# generated local files
-*.log
-
-# editor files
-*~
-*.bak
-
-# generated python files
-*.pyc
-
-# test tmp file
-test/tmp
diff --git a/manage_externals/.travis.yml b/manage_externals/.travis.yml
deleted file mode 100644
index d9b24c584d..0000000000
--- a/manage_externals/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: python
-os: linux
-python:
- - "3.4"
- - "3.5"
- - "3.6"
- - "3.7"
- - "3.8"
-install:
- - pip install -r test/requirements.txt
-before_script:
- - git --version
-script:
- - cd test; make test
- - cd test; make lint
-after_success:
- - cd test; make coverage
- - cd test; coveralls
diff --git a/manage_externals/LICENSE.txt b/manage_externals/LICENSE.txt
deleted file mode 100644
index 665ee03fbc..0000000000
--- a/manage_externals/LICENSE.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-Copyright (c) 2017-2018, University Corporation for Atmospheric Research (UCAR)
-All rights reserved.
-
-Developed by:
- University Corporation for Atmospheric Research - National Center for Atmospheric Research
- https://www2.cesm.ucar.edu/working-groups/sewg
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the "Software"),
-to deal with the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom
-the Software is furnished to do so, subject to the following conditions:
-
- - Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimers.
- - Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimers in the documentation
- and/or other materials provided with the distribution.
- - Neither the names of [Name of Development Group, UCAR],
- nor the names of its contributors may be used to endorse or promote
- products derived from this Software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/manage_externals/README.md b/manage_externals/README.md
deleted file mode 100644
index 9475301b5d..0000000000
--- a/manage_externals/README.md
+++ /dev/null
@@ -1,231 +0,0 @@
--- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --
-
-[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master)
-```
-usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace]
- [-d] [--no-logging]
-
-checkout_externals manages checking out groups of externals from revision
-control based on a externals description file. By default only the
-required externals are checkout out.
-
-Operations performed by manage_externals utilities are explicit and
-data driven. checkout_externals will always make the working copy *exactly*
-match what is in the externals file when modifying the working copy of
-a repository.
-
-If checkout_externals isn't doing what you expected, double check the contents
-of the externals description file.
-
-Running checkout_externals without the '--status' option will always attempt to
-synchronize the working copy to exactly match the externals description.
-
-optional arguments:
- -h, --help show this help message and exit
- -e [EXTERNALS], --externals [EXTERNALS]
- The externals description filename. Default:
- Externals.cfg.
- -o, --optional By default only the required externals are checked
- out. This flag will also checkout the optional
- externals.
- -S, --status Output status of the repositories managed by
- checkout_externals. By default only summary
- information is provided. Use verbose output to see
- details.
- -v, --verbose Output additional information to the screen and log
- file. This flag can be used up to two times,
- increasing the verbosity level each time.
- --backtrace DEVELOPER: show exception backtraces as extra
- debugging output
- -d, --debug DEVELOPER: output additional debugging information to
- the screen and log file.
- --no-logging DEVELOPER: disable logging.
-
-```
-NOTE: checkout_externals *MUST* be run from the root of the source tree it
-is managing. For example, if you cloned a repository with:
-
- $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev
-
-Then the root of the source tree is /path/to/some-project-dev. If you
-obtained a sub-project via a checkout of another project:
-
- $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev
-
-and you need to checkout the sub-project externals, then the root of the
-source tree remains /path/to/some-project-dev. Do *NOT* run checkout_externals
-from within /path/to/some-project-dev/sub-project.
-
-The root of the source tree will be referred to as `${SRC_ROOT}` below.
-
-# Supported workflows
-
- * Checkout all required components from the default externals
- description file:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/checkout_externals
-
- * To update all required components to the current values in the
- externals description file, re-run checkout_externals:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/checkout_externals
-
- If there are *any* modifications to *any* working copy according
- to the git or svn 'status' command, checkout_externals
- will not update any external repositories. Modifications
- include: modified files, added files, removed files, or missing
- files.
-
- To avoid this safety check, edit the externals description file
- and comment out the modified external block.
-
- * Checkout all required components from a user specified externals
- description file:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/checkout_externals --externals my-externals.cfg
-
- * Status summary of the repositories managed by checkout_externals:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/checkout_externals --status
-
- ./cime
- s ./components/cism
- ./components/mosart
- e-o ./components/rtm
- M ./src/fates
- e-o ./tools/PTCLM
-
- where:
- * column one indicates the status of the repository in relation
- to the externals description file.
- * column two indicates whether the working copy has modified files.
- * column three shows how the repository is managed, optional or required
-
- Column one will be one of these values:
- * s : out-of-sync : repository is checked out at a different commit
- compared with the externals description
- * e : empty : directory does not exist - checkout_externals has not been run
- * ? : unknown : directory exists but .git or .svn directories are missing
-
- Column two will be one of these values:
- * M : Modified : modified, added, deleted or missing files
- * : blank / space : clean
- * - : dash : no meaningful state, for empty repositories
-
- Column three will be one of these values:
- * o : optional : optional repository
- * : blank / space : required repository
-
- * Detailed git or svn status of the repositories managed by checkout_externals:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/checkout_externals --status --verbose
-
-# Externals description file
-
- The externals description contains a list of the external
- repositories that are used and their version control locations. The
- file format is the standard ini/cfg configuration file format. Each
- external is defined by a section containing the component name in
- square brackets:
-
- * name (string) : component name, e.g. [cime], [cism], etc.
-
- Each section has the following keyword-value pairs:
-
- * required (boolean) : whether the component is a required checkout,
- 'true' or 'false'.
-
- * local_path (string) : component path *relative* to where
- checkout_externals is called.
-
- * protocol (string) : version control protocol that is used to
- manage the component. Valid values are 'git', 'svn',
- 'externals_only'.
-
- Switching an external between different protocols is not
- supported, e.g. from svn to git. To switch protocols, you need to
- manually move the old working copy to a new location.
-
- Note: 'externals_only' will only process the external's own
- external description file without trying to manage a repository
- for the component. This is used for retrieving externals for
- standalone components like cam and clm. If the source root of the
- externals_only component is the same as the main source root, then
- the local path must be set to '.', the unix current working
- directory, e.g. 'local_path = .'
-
- * repo_url (string) : URL for the repository location, examples:
- * https://svn-ccsm-models.cgd.ucar.edu/glc
- * git@github.com:esmci/cime.git
- * /path/to/local/repository
- * .
-
- NOTE: To operate on only the local clone and ignore remote
- repositories, set the url to '.' (the unix current path),
- i.e. 'repo_url = .' . This can be used to checkout a local branch
- instead of the upstream branch.
-
- If a repo url is determined to be a local path (not a network url)
- then user expansion, e.g. ~/, and environment variable expansion,
- e.g. $HOME or $REPO_ROOT, will be performed.
-
- Relative paths are difficult to get correct, especially for mixed
- use repos. It is advised that local paths expand to absolute paths.
- If relative paths are used, they should be relative to one level
- above local_path. If local path is 'src/foo', then the relative url
- should be relative to 'src'.
-
- * tag (string) : tag to checkout
-
- * hash (string) : the git hash to checkout. Only applies to git
- repositories.
-
- * branch (string) : branch to checkout from the specified
- repository. Specifying a branch on a remote repository means that
- checkout_externals will checkout the version of the branch in the remote,
- not the version in the local repository (if it exists).
-
- Note: one and only one of tag, branch, or hash must be supplied.
-
- * externals (string) : used to make manage_externals aware of
- sub-externals required by an external. This is a path relative to
- the external's root directory. For example, suppose the main externals
- description has an external checked out at 'src/useful_library', and
- useful_library requires additional externals of its own, managed from
- the source root by the externals description file
- 'useful_library/sub-externals.cfg'. Then the 'externals' field in the
- useful_library entry of the top-level description should be set to
- 'sub-externals.cfg'.
- Note that by default, `checkout_externals` will clone an external's
- submodules. As a special case, the entry, `externals = None`, will
- prevent this behavior. For more control over which externals are
- checked out, create an externals file (and see the `from_submodule`
- configuration entry below).
-
- * from_submodule (True / False) : used to pull the repo_url, local_path,
- and hash properties for this external from the .gitmodules file in
- this repository. Note that the section name (the entry in square
- brackets) must match the name in the .gitmodules file.
- If from_submodule is True, the protocol must be git and no repo_url,
- local_path, hash, branch, or tag entries are allowed.
- Default: False
-
- * sparse (string) : used to control a sparse checkout. This optional
- entry should point to a filename (path relative to local_path) that
- contains instructions on which repository paths to include (or
- exclude) from the working tree.
- See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree
- Default: sparse checkout is disabled
-
- * Lines beginning with '#' or ';' are comments and will be ignored.
-
-# Obtaining this tool, reporting issues, etc.
-
- The master repository for manage_externals is
- https://github.com/ESMCI/manage_externals. Any issues with this tool
- should be reported there.
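
To make the keyword reference above concrete, here is a minimal, hypothetical
`Externals.cfg`. All component names, paths, URLs, and version labels are
placeholders; the `[externals_description]` metadata section carrying
`schema_version` is required by the parser (see `externals_description.py`
later in this patch):

```cfg
[externals_description]
schema_version = 1.0.0

[cime]
required = True
local_path = cime
protocol = git
repo_url = git@github.com:esmci/cime.git
tag = some-cime-tag

[fates]
required = False
local_path = src/fates
protocol = git
repo_url = https://github.com/NGEET/fates
branch = some-fates-branch
```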
diff --git a/manage_externals/README_FIRST b/manage_externals/README_FIRST
deleted file mode 100644
index c8a47d7806..0000000000
--- a/manage_externals/README_FIRST
+++ /dev/null
@@ -1,54 +0,0 @@
-CESM is composed of a number of different components that are
-developed and managed independently. Each component may have
-additional 'external' dependencies and optional parts that are also
-developed and managed independently.
-
-The checkout_externals.py tool manages retrieving and updating the
-components and their externals so you have a complete set of source
-files for the model.
-
-checkout_externals.py relies on a model description file that
-describes what components are needed, where to find them and where to
-put them in the source tree. The default file is called "Externals.cfg"
-regardless of whether you are checking out CESM or a standalone
-component.
-
-checkout_externals requires access to git and svn repositories that
-require authentication. checkout_externals may pass through
-authentication requests, but it will not cache them for you. For the
-best and most robust user experience, you should have svn and git
-working without password authentication. See:
-
- https://help.github.com/articles/connecting-to-github-with-ssh/
-
- ?svn ref?
-
-NOTE: checkout_externals.py *MUST* be run from the root of the source
-tree it is managing. For example, if you cloned CLM with:
-
- $ git clone git@github.com:ncar/clm clm-dev
-
-Then the root of the source tree is /path/to/clm-dev. If you obtained
-CLM via an svn checkout of CESM and you need to checkout the CLM
-externals, then the root of the source tree for CLM is:
-
- /path/to/cesm-dev/components/clm
-
-The root of the source tree will be referred to as ${SRC_ROOT} below.
-
-To get started quickly, checkout all required components from the
-default model description file:
-
- $ cd ${SRC_ROOT}
- $ ./checkout_cesm/checkout_externals.py
-
-For additional information about using checkout_externals, please see:
-
- ${SRC_ROOT}/checkout_cesm/README
-
-or run:
-
- $ cd ${SRC_ROOT}
- $ ./checkout_cesm/checkout_externals.py --help
-
-
diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals
index 48bce24010..5f848f5da9 100755
--- a/manage_externals/checkout_externals
+++ b/manage_externals/checkout_externals
@@ -1,36 +1,3 @@
-#!/usr/bin/env python3
-
-"""Main driver wrapper around the manic/checkout utility.
-
-Tool to assemble external repositories represented in an externals
-description file.
-
-"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import sys
-import traceback
-
-import manic
-
-if sys.hexversion < 0x02070000:
- print(70 * '*')
- print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0]))
- print('It appears that you are running python {0}'.format(
- '.'.join(str(x) for x in sys.version_info[0:3])))
- print(70 * '*')
- sys.exit(1)
-
-
-if __name__ == '__main__':
- ARGS = manic.checkout.commandline_arguments()
- try:
- RET_STATUS, _ = manic.checkout.main(ARGS)
- sys.exit(RET_STATUS)
- except Exception as error: # pylint: disable=broad-except
- manic.printlog(str(error))
- if ARGS.backtrace:
- traceback.print_exc()
- sys.exit(1)
+echo "ERROR: Instead of ./manage_externals/checkout_externals"
+echo "please type './bin/git-fleximod update'"
+echo "For additional information, please type './bin/git-fleximod --help'"
diff --git a/manage_externals/manic/__init__.py b/manage_externals/manic/__init__.py
deleted file mode 100644
index 11badedd3b..0000000000
--- a/manage_externals/manic/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""Public API for the manage_externals library
-"""
-
-from manic import checkout
-from manic.utils import printlog
-
-__all__ = [
- 'checkout', 'printlog',
-]
diff --git a/manage_externals/manic/checkout.py b/manage_externals/manic/checkout.py
deleted file mode 100755
index 3f5537adce..0000000000
--- a/manage_externals/manic/checkout.py
+++ /dev/null
@@ -1,446 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-Tool to assemble repositories represented in a model-description file.
-
-If loaded as a module (e.g., in a component's buildcpp), it can be used
-to check the validity of existing subdirectories and load missing sources.
-"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import argparse
-import logging
-import os
-import os.path
-import sys
-
-from manic.externals_description import create_externals_description
-from manic.externals_description import read_externals_description_file
-from manic.externals_status import check_safe_to_update_repos
-from manic.sourcetree import SourceTree
-from manic.utils import printlog, fatal_error
-from manic.global_constants import VERSION_SEPERATOR, LOG_FILE_NAME
-
-if sys.hexversion < 0x02070000:
- print(70 * '*')
- print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0]))
- print('It appears that you are running python {0}'.format(
- VERSION_SEPERATOR.join(str(x) for x in sys.version_info[0:3])))
- print(70 * '*')
- sys.exit(1)
-
-
-# ---------------------------------------------------------------------
-#
-# User input
-#
-# ---------------------------------------------------------------------
-def commandline_arguments(args=None):
- """Process the command line arguments
-
- Params: args - optional args. Should only be used during systems
- testing.
-
- Returns: processed command line arguments
- """
- description = '''
-
-%(prog)s manages checking out groups of externals from revision
-control based on an externals description file. By default only the
-required externals are checked out.
-
-Running %(prog)s without the '--status' option will always attempt to
-synchronize the working copy to exactly match the externals description.
-'''
-
- epilog = '''
-```
-NOTE: %(prog)s *MUST* be run from the root of the source tree it
-is managing. For example, if you cloned a repository with:
-
- $ git clone git@github.com:{SOME_ORG}/some-project some-project-dev
-
-Then the root of the source tree is /path/to/some-project-dev. If you
-obtained a sub-project via a checkout of another project:
-
- $ git clone git@github.com:{SOME_ORG}/some-project some-project-dev
-
-and you need to checkout the sub-project externals, then the root of the
-source tree remains /path/to/some-project-dev. Do *NOT* run %(prog)s
-from within /path/to/some-project-dev/sub-project
-
-The root of the source tree will be referred to as `${SRC_ROOT}` below.
-
-
-# Supported workflows
-
- * Checkout all required components from the default externals
- description file:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/%(prog)s
-
- * To update all required components to the current values in the
- externals description file, re-run %(prog)s:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/%(prog)s
-
- If there are *any* modifications to *any* working copy according
- to the git or svn 'status' command, %(prog)s
- will not update any external repositories. Modifications
- include: modified files, added files, removed files, or missing
- files.
-
- To avoid this safety check, edit the externals description file
- and comment out the modified external block.
-
- * Checkout all required components from a user specified externals
- description file:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/%(prog)s --externals my-externals.cfg
-
- * Status summary of the repositories managed by %(prog)s:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/%(prog)s --status
-
- ./cime
- s ./components/cism
- ./components/mosart
- e-o ./components/rtm
- M ./src/fates
- e-o ./tools/PTCLM
-
-
- where:
- * column one indicates the status of the repository in relation
- to the externals description file.
- * column two indicates whether the working copy has modified files.
- * column three shows how the repository is managed, optional or required
-
- Column one will be one of these values:
- * s : out-of-sync : repository is checked out at a different commit
- compared with the externals description
- * e : empty : directory does not exist - %(prog)s has not been run
- * ? : unknown : directory exists but .git or .svn directories are missing
-
- Column two will be one of these values:
- * M : Modified : modified, added, deleted or missing files
- * : blank / space : clean
- * - : dash : no meaningful state, for empty repositories
-
- Column three will be one of these values:
- * o : optional : optional repository
- * : blank / space : required repository
-
- * Detailed git or svn status of the repositories managed by %(prog)s:
-
- $ cd ${SRC_ROOT}
- $ ./manage_externals/%(prog)s --status --verbose
-
-# Externals description file
-
- The externals description contains a list of the external
- repositories that are used and their version control locations. The
- file format is the standard ini/cfg configuration file format. Each
- external is defined by a section containing the component name in
- square brackets:
-
- * name (string) : component name, e.g. [cime], [cism], etc.
-
- Each section has the following keyword-value pairs:
-
- * required (boolean) : whether the component is a required checkout,
- 'true' or 'false'.
-
- * local_path (string) : component path *relative* to where
- %(prog)s is called.
-
- * protocol (string) : version control protocol that is used to
- manage the component. Valid values are 'git', 'svn',
- 'externals_only'.
-
- Switching an external between different protocols is not
- supported, e.g. from svn to git. To switch protocols, you need to
- manually move the old working copy to a new location.
-
- Note: 'externals_only' will only process the external's own
- external description file without trying to manage a repository
- for the component. This is used for retrieving externals for
- standalone components like cam and ctsm which also serve as
- sub-components within a larger project. If the source root of the
- externals_only component is the same as the main source root, then
- the local path must be set to '.', the unix current working
- directory, e.g. 'local_path = .'
-
- * repo_url (string) : URL for the repository location, examples:
- * https://svn-ccsm-models.cgd.ucar.edu/glc
- * git@github.com:esmci/cime.git
- * /path/to/local/repository
- * .
-
- NOTE: To operate on only the local clone and ignore remote
- repositories, set the url to '.' (the unix current path),
- i.e. 'repo_url = .' . This can be used to checkout a local branch
- instead of the upstream branch.
-
- If a repo url is determined to be a local path (not a network url)
- then user expansion, e.g. ~/, and environment variable expansion,
- e.g. $HOME or $REPO_ROOT, will be performed.
-
- Relative paths are difficult to get correct, especially for mixed
- use repos. It is advised that local paths expand to absolute paths.
- If relative paths are used, they should be relative to one level
- above local_path. If local path is 'src/foo', then the relative url
- should be relative to 'src'.
-
- * tag (string) : tag to checkout
-
- * hash (string) : the git hash to checkout. Only applies to git
- repositories.
-
- * branch (string) : branch to checkout from the specified
- repository. Specifying a branch on a remote repository means that
- %(prog)s will checkout the version of the branch in the remote,
- not the version in the local repository (if it exists).
-
- Note: one and only one of tag, branch, or hash must be supplied.
-
- * externals (string) : used to make manage_externals aware of
- sub-externals required by an external. This is a relative path to
- the external's root directory. For example, if LIBX is often used
- as a sub-external, it might have an externals file (for its
- externals) called Externals_LIBX.cfg. To use libx as a standalone
- checkout, it would have another file, Externals.cfg with the
- following entry:
-
- [ libx ]
- local_path = .
- protocol = externals_only
- externals = Externals_LIBX.cfg
- required = True
-
- Now, %(prog)s will process Externals.cfg and also process
- Externals_LIBX.cfg as if it were a sub-external.
-
- Note that by default, checkout_externals will clone an external's
- submodules. As a special case, the entry, "externals = None", will
- prevent this behavior. For more control over which externals are
- checked out, create an externals file (and see the from_submodule
- configuration entry below).
-
- * from_submodule (True / False) : used to pull the repo_url, local_path,
- and hash properties for this external from the .gitmodules file in
- this repository. Note that the section name (the entry in square
- brackets) must match the name in the .gitmodules file.
- If from_submodule is True, the protocol must be git and no repo_url,
- local_path, hash, branch, or tag entries are allowed.
- Default: False
-
- * sparse (string) : used to control a sparse checkout. This optional
- entry should point to a filename (path relative to local_path) that
- contains instructions on which repository paths to include (or
- exclude) from the working tree.
- See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree
- Default: sparse checkout is disabled
-
- * Lines beginning with '#' or ';' are comments and will be ignored.
-
-# Obtaining this tool, reporting issues, etc.
-
- The master repository for manage_externals is
- https://github.com/ESMCI/manage_externals. Any issues with this tool
- should be reported there.
-
-# Troubleshooting
-
-Operations performed by manage_externals utilities are explicit and
-data driven. %(prog)s will always attempt to make the working copy
-*exactly* match what is in the externals file when modifying the
-working copy of a repository.
-
-If %(prog)s is not doing what you expected, double check the contents
-of the externals description file or examine the output of
-./manage_externals/%(prog)s --status
-
-'''
-
- parser = argparse.ArgumentParser(
- description=description, epilog=epilog,
- formatter_class=argparse.RawDescriptionHelpFormatter)
-
- #
- # user options
- #
- parser.add_argument("components", nargs="*",
- help="Specific component(s) to checkout. By default, "
- "all required externals are checked out.")
-
- parser.add_argument('-e', '--externals', nargs='?',
- default='Externals.cfg',
- help='The externals description filename. '
- 'Default: %(default)s.')
-
- parser.add_argument('-x', '--exclude', nargs='*',
- help='Component(s) listed in the externals file which should be ignored.')
-
- parser.add_argument('-o', '--optional', action='store_true', default=False,
- help='By default only the required externals '
- 'are checked out. This flag will also checkout the '
- 'optional externals.')
-
- parser.add_argument('-S', '--status', action='store_true', default=False,
- help='Output the status of the repositories managed by '
- '%(prog)s. By default only summary information '
- 'is provided. Use the verbose option to see details.')
-
- parser.add_argument('-v', '--verbose', action='count', default=0,
- help='Output additional information to '
- 'the screen and log file. This flag can be '
- 'used up to two times, increasing the '
- 'verbosity level each time.')
-
- parser.add_argument('--svn-ignore-ancestry', action='store_true', default=False,
- help='By default, subversion will abort if a component is '
- 'already checked out and there is no common ancestry with '
- 'the new URL. This flag passes the "--ignore-ancestry" flag '
- 'to the svn switch call. (This is not recommended unless '
- 'you are sure about what you are doing.)')
-
- #
- # developer options
- #
- parser.add_argument('--backtrace', action='store_true',
- help='DEVELOPER: show exception backtraces as extra '
- 'debugging output')
-
- parser.add_argument('-d', '--debug', action='store_true', default=False,
- help='DEVELOPER: output additional debugging '
- 'information to the screen and log file.')
-
- logging_group = parser.add_mutually_exclusive_group()
-
- logging_group.add_argument('--logging', dest='do_logging',
- action='store_true',
- help='DEVELOPER: enable logging.')
- logging_group.add_argument('--no-logging', dest='do_logging',
- action='store_false', default=False,
- help='DEVELOPER: disable logging '
- '(this is the default)')
-
- if args:
- options = parser.parse_args(args)
- else:
- options = parser.parse_args()
- return options
-
-def _dirty_local_repo_msg(program_name, config_file):
- return """The external repositories labeled with 'M' above are not in a clean state.
-The following are four options for how to proceed:
-(1) Go into each external that is not in a clean state and issue either a 'git status' or
- an 'svn status' command (depending on whether the external is managed by git or
- svn). Either revert or commit your changes so that all externals are in a clean
- state. (To revert changes in git, follow the instructions given when you run 'git
- status'.) (Note, though, that it is okay to have untracked files in your working
- directory.) Then rerun {program_name}.
-(2) Alternatively, you do not have to rely on {program_name}. Instead, you can manually
- update out-of-sync externals (labeled with 's' above) as described in the
- configuration file {config_file}. (For example, run 'git fetch' and 'git checkout'
- commands to checkout the appropriate tags for each external, as given in
- {config_file}.)
-(3) You can also use {program_name} to manage most, but not all externals: You can specify
- one or more externals to ignore using the '-x' or '--exclude' argument to
- {program_name}. Excluding externals labeled with 'M' will allow {program_name} to
- update the other, non-excluded externals.
-(4) As a last resort, if you are confident that there is no work that needs to be saved
- from a given external, you can remove that external (via "rm -rf [directory]") and
- then rerun the {program_name} tool. This option is mainly useful as a workaround for
- issues with this tool (such as https://github.com/ESMCI/manage_externals/issues/157).
-The external repositories labeled with '?' above are not under version
-control using the expected protocol. If you are sure you want to switch
-protocols, and you don't have any work you need to save from this
-directory, then run "rm -rf [directory]" before rerunning the
-{program_name} tool.
-""".format(program_name=program_name, config_file=config_file)
-# ---------------------------------------------------------------------
-#
-# main
-#
-# ---------------------------------------------------------------------
-def main(args):
- """
- Function to call when module is called from the command line.
- Parse externals file and load required repositories or all repositories if
- the --all option is passed.
-
- Returns a tuple (overall_status, tree_status). overall_status is 0
- on success, non-zero on failure. tree_status is a dict mapping local
- path to ExternalStatus if no checkout is happening; if a checkout is
- happening, tree_status is None.
- """
- if args.do_logging:
- logging.basicConfig(filename=LOG_FILE_NAME,
- format='%(levelname)s : %(asctime)s : %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S',
- level=logging.DEBUG)
-
- program_name = os.path.basename(sys.argv[0])
- logging.info('Beginning of %s', program_name)
-
- load_all = False
- if args.optional:
- load_all = True
-
- root_dir = os.path.abspath(os.getcwd())
- model_data = read_externals_description_file(root_dir, args.externals)
- ext_description = create_externals_description(
- model_data, components=args.components, exclude=args.exclude)
-
- for comp in args.components:
- if comp not in ext_description.keys():
- # Note we can't print out the list of found externals because
- # they were filtered in create_externals_description above.
- fatal_error(
- "No component {} found in {}".format(
- comp, args.externals))
-
- source_tree = SourceTree(root_dir, ext_description, svn_ignore_ancestry=args.svn_ignore_ancestry)
- if args.components:
- components_str = 'specified components'
- else:
- components_str = 'required & optional components'
- printlog('Checking local status of ' + components_str + ': ', end='')
- tree_status = source_tree.status(print_progress=True)
- printlog('')
-
- if args.status:
- # user requested status-only
- for comp in sorted(tree_status):
- tree_status[comp].log_status_message(args.verbose)
- else:
- # checkout / update the external repositories.
- safe_to_update = check_safe_to_update_repos(tree_status)
- if not safe_to_update:
- # print status
- for comp in sorted(tree_status):
- tree_status[comp].log_status_message(args.verbose)
- # exit gracefully
- printlog('-' * 70)
- printlog(_dirty_local_repo_msg(program_name, args.externals))
- printlog('-' * 70)
- else:
- if not args.components:
- source_tree.checkout(args.verbose, load_all)
- for comp in args.components:
- source_tree.checkout(args.verbose, load_all, load_comp=comp)
- printlog('')
- # New tree status is unknown, don't return anything.
- tree_status = None
-
- logging.info('%s completed without exceptions.', program_name)
- # NOTE(bja, 2017-11) tree status is used by the systems tests
- return 0, tree_status
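
The module docstring above notes that checkout.py can also be loaded as a
module (e.g., in a component's buildcpp). A minimal, hypothetical sketch of
that usage, assuming the `manage_externals` directory is on `sys.path` and an
`Externals.cfg` exists in the current directory:

```python
# Hypothetical sketch: driving manic.checkout as a library instead of via the
# wrapper script. The flags mirror the argparse options defined above.
from manic import checkout

args = checkout.commandline_arguments(['--externals', 'Externals.cfg', '--status'])
overall_status, tree_status = checkout.main(args)

# In --status mode, tree_status maps each local path to an ExternalStatus;
# after an actual checkout it is None because the new tree state is unknown.
for path in sorted(tree_status or {}):
    print(tree_status[path])
```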
diff --git a/manage_externals/manic/externals_description.py b/manage_externals/manic/externals_description.py
deleted file mode 100644
index 546e7fdcb4..0000000000
--- a/manage_externals/manic/externals_description.py
+++ /dev/null
@@ -1,830 +0,0 @@
-#!/usr/bin/env python3
-
-"""Model description
-
-Model description is the representation of the various externals
-included in the model. It processes in input data structure, and
-converts it into a standard interface that is used by the rest of the
-system.
-
-To maintain backward compatibility, externals description files should
-follow semantic versioning rules, http://semver.org/
-
-
-
-"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import logging
-import os
-import os.path
-import re
-
-# ConfigParser in python2 was renamed to configparser in python3.
-# In python2, ConfigParser returns byte strings, str, instead of unicode.
-# We need unicode to be compatible with xml and json parser and python3.
-try:
- # python2
- from ConfigParser import SafeConfigParser as config_parser
- from ConfigParser import MissingSectionHeaderError
- from ConfigParser import NoSectionError, NoOptionError
-
- USE_PYTHON2 = True
-
- def config_string_cleaner(text):
- """convert strings into unicode
- """
- return text.decode('utf-8')
-except ImportError:
- # python3
- from configparser import ConfigParser as config_parser
- from configparser import MissingSectionHeaderError
- from configparser import NoSectionError, NoOptionError
-
- USE_PYTHON2 = False
-
- def config_string_cleaner(text):
- """Python3 already uses unicode strings, so just return the string
- without modification.
-
- """
- return text
-
-from .utils import printlog, fatal_error, str_to_bool, expand_local_url
-from .utils import execute_subprocess
-from .global_constants import EMPTY_STR, PPRINTER, VERSION_SEPERATOR
-
-#
-# Globals
-#
-DESCRIPTION_SECTION = 'externals_description'
-VERSION_ITEM = 'schema_version'
-
-
-def read_externals_description_file(root_dir, file_name):
- """Read a file containing an externals description and
- create its internal representation.
-
- """
- root_dir = os.path.abspath(root_dir)
- msg = 'In directory : {0}'.format(root_dir)
- logging.info(msg)
- printlog('Processing externals description file : {0} ({1})'.format(file_name,
- root_dir))
-
- file_path = os.path.join(root_dir, file_name)
- if not os.path.exists(file_path):
- if file_name.lower() == "none":
- msg = ('INTERNAL ERROR: Attempt to read externals file '
- 'from {0} when not configured'.format(file_path))
- else:
- msg = ('ERROR: Model description file, "{0}", does not '
- 'exist at path:\n {1}\nDid you run from the root of '
- 'the source tree?'.format(file_name, file_path))
-
- fatal_error(msg)
-
- externals_description = None
- if file_name == ExternalsDescription.GIT_SUBMODULES_FILENAME:
- externals_description = _read_gitmodules_file(root_dir, file_name)
- else:
- try:
- config = config_parser()
- config.read(file_path)
- externals_description = config
- except MissingSectionHeaderError:
- # not a cfg file
- pass
-
- if externals_description is None:
- msg = 'Unknown file format!'
- fatal_error(msg)
-
- return externals_description
-
-class LstripReader(object):
- "LstripReader formats .gitmodules files to be acceptable for configparser"
- def __init__(self, filename):
- with open(filename, 'r') as infile:
- lines = infile.readlines()
- self._lines = list()
- self._num_lines = len(lines)
- self._index = 0
- for line in lines:
- self._lines.append(line.lstrip())
-
- def readlines(self):
- """Return all the lines from this object's file"""
- return self._lines
-
- def readline(self, size=-1):
- """Format and return the next line or raise StopIteration"""
- try:
- line = self.next()
- except StopIteration:
- line = ''
-
- if (size > 0) and (len(line) < size):
- return line[0:size]
-
- return line
-
- def __iter__(self):
- """Begin an iteration"""
- self._index = 0
- return self
-
- def next(self):
- """Return the next line or raise StopIteration"""
- if self._index >= self._num_lines:
- raise StopIteration
-
- self._index = self._index + 1
- return self._lines[self._index - 1]
-
- def __next__(self):
- return self.next()
-
-def git_submodule_status(repo_dir):
- """Run the git submodule status command to obtain submodule hashes.
- """
- # This function is here instead of GitRepository to avoid a dependency loop
- cmd = 'git -C {repo_dir} submodule status'.format(
- repo_dir=repo_dir).split()
- git_output = execute_subprocess(cmd, output_to_caller=True)
- submodules = {}
- submods = git_output.split('\n')
- for submod in submods:
- if submod:
- status = submod[0]
- items = submod[1:].split(' ')
- if len(items) > 2:
- tag = items[2]
- else:
- tag = None
-
- submodules[items[1]] = {'hash':items[0], 'status':status, 'tag':tag}
-
- return submodules
-
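
As an illustration of the dict shape returned by `git_submodule_status` above
(the hash, path, and tag here are hypothetical):

```python
# One `git submodule status` output line (leading ' ' = in sync,
# '-' = uninitialized, '+' = checked-out commit differs) ...
line = " a1b2c3d src/fates (some-tag)"
status, items = line[0], line[1:].split(' ')
# ... parses into the nested dict built by the function above:
parsed = {items[1]: {'hash': items[0], 'status': status, 'tag': items[2]}}
# parsed == {'src/fates': {'hash': 'a1b2c3d', 'status': ' ', 'tag': '(some-tag)'}}
```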
-def parse_submodules_desc_section(section_items, file_path):
- """Find the path and url for this submodule description"""
- path = None
- url = None
- for item in section_items:
- name = item[0].strip().lower()
- if name == 'path':
- path = item[1].strip()
- elif name == 'url':
- url = item[1].strip()
- elif name == 'branch':
- # We do not care about branch since we have a hash - silently ignore
- pass
- else:
- msg = 'WARNING: Ignoring unknown {} property, in {}'
- msg = msg.format(item[0], file_path) # fool pylint
- logging.warning(msg)
-
- return path, url
-
-def _read_gitmodules_file(root_dir, file_name):
- # pylint: disable=deprecated-method
- # Disabling this check because the method is only used for python2
- # pylint: disable=too-many-locals
- # pylint: disable=too-many-branches
- # pylint: disable=too-many-statements
- """Read a .gitmodules file and convert it to be compatible with an
- externals description.
- """
- root_dir = os.path.abspath(root_dir)
- msg = 'In directory : {0}'.format(root_dir)
- logging.info(msg)
-
- file_path = os.path.join(root_dir, file_name)
- if not os.path.exists(file_path):
- msg = ('ERROR: submodules description file, "{0}", does not '
- 'exist in dir:\n {1}'.format(file_name, root_dir))
- fatal_error(msg)
-
- submodules_description = None
- externals_description = None
- try:
- config = config_parser()
- if USE_PYTHON2:
- config.readfp(LstripReader(file_path), filename=file_name)
- else:
- config.read_file(LstripReader(file_path), source=file_name)
-
- submodules_description = config
- except MissingSectionHeaderError:
- # not a cfg file
- pass
-
- if submodules_description is None:
- msg = 'Unknown file format!'
- fatal_error(msg)
- else:
- # Convert the submodules description to an externals description
- externals_description = config_parser()
- # We need to grab all the commit hashes for this repo
- submods = git_submodule_status(root_dir)
- for section in submodules_description.sections():
- if section[0:9] == 'submodule':
- sec_name = section[9:].strip(' "')
- externals_description.add_section(sec_name)
- section_items = submodules_description.items(section)
- path, url = parse_submodules_desc_section(section_items,
- file_path)
-
- if path is None:
- msg = 'Submodule {} missing path'.format(sec_name)
- fatal_error(msg)
-
- if url is None:
- msg = 'Submodule {} missing url'.format(sec_name)
- fatal_error(msg)
-
- externals_description.set(sec_name,
- ExternalsDescription.PATH, path)
- externals_description.set(sec_name,
- ExternalsDescription.PROTOCOL, 'git')
- externals_description.set(sec_name,
- ExternalsDescription.REPO_URL, url)
- externals_description.set(sec_name,
- ExternalsDescription.REQUIRED, 'True')
- if sec_name in submods:
- submod_name = sec_name
- else:
- # The section name does not have to match the path
- submod_name = path
-
- if submod_name in submods:
- git_hash = submods[submod_name]['hash']
- externals_description.set(sec_name,
- ExternalsDescription.HASH,
- git_hash)
- else:
- emsg = "submodule status has no section, '{}'"
- emsg += "\nCheck section names in externals config file"
- fatal_error(emsg.format(submod_name))
-
- # Required items
- externals_description.add_section(DESCRIPTION_SECTION)
- externals_description.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0')
-
- return externals_description
-
-def create_externals_description(
- model_data, model_format='cfg', components=None, exclude=None, parent_repo=None):
- """Create the a externals description object from the provided data
-
- components: list of component names to include, None to include all. If a
- name isn't found, it is silently omitted from the return value.
- exclude: list of component names to skip.
- """
- externals_description = None
- if model_format == 'dict':
- externals_description = ExternalsDescriptionDict(
- model_data, components=components, exclude=exclude)
- elif model_format == 'cfg':
- major, _, _ = get_cfg_schema_version(model_data)
- if major == 1:
- externals_description = ExternalsDescriptionConfigV1(
- model_data, components=components, exclude=exclude, parent_repo=parent_repo)
- else:
- msg = ('Externals description file has unsupported schema '
- 'version "{0}".'.format(major))
- fatal_error(msg)
- else:
- msg = 'Unknown model data format "{0}"'.format(model_format)
- fatal_error(msg)
- return externals_description
-
-
-def get_cfg_schema_version(model_cfg):
- """Extract the major, minor, patch version of the config file schema
-
- Params:
- model_cfg - config parser object containing the externals description data
-
- Returns:
- major = integer major version
- minor = integer minor version
- patch = integer patch version
- """
- semver_str = ''
- try:
- semver_str = model_cfg.get(DESCRIPTION_SECTION, VERSION_ITEM)
- except (NoSectionError, NoOptionError):
- msg = ('externals description file must have the required '
- 'section: "{0}" and item "{1}"'.format(DESCRIPTION_SECTION,
- VERSION_ITEM))
- fatal_error(msg)
-
- # NOTE(bja, 2017-11) Assume we don't care about the
- # build/pre-release metadata for now!
- version_list = re.split(r'[-+]', semver_str)
- version_str = version_list[0]
- version = version_str.split(VERSION_SEPERATOR)
- try:
- major = int(version[0].strip())
- minor = int(version[1].strip())
- patch = int(version[2].strip())
- except ValueError:
- msg = ('Config file schema version must have integer digits for '
- 'major, minor and patch versions. '
- 'Received "{0}"'.format(version_str))
- fatal_error(msg)
- return major, minor, patch
-
-
-class ExternalsDescription(dict):
- """Base externals description class that is independent of the user input
- format. Different input formats can all be converted to this
- representation to provide a consistent represtentation for the
- rest of the objects in the system.
-
- NOTE(bja, 2018-03): do NOT define _schema_major etc at the class
- level in the base class. The nested/recursive nature of externals
- means different schema versions may be present in a single run!
-
- All inheriting classes must overwrite:
- self._schema_major and self._input_major
- self._schema_minor and self._input_minor
- self._schema_patch and self._input_patch
-
- where _schema_x is the supported schema, _input_x is the user
- input value.
-
- """
- # keywords defining the interface into the externals description data; these
- # are brought together by the schema below.
- EXTERNALS = 'externals' # path to externals file.
- BRANCH = 'branch'
- SUBMODULE = 'from_submodule'
- HASH = 'hash'
- NAME = 'name'
- PATH = 'local_path'
- PROTOCOL = 'protocol'
- REPO = 'repo'
- REPO_URL = 'repo_url'
- REQUIRED = 'required'
- TAG = 'tag'
- SPARSE = 'sparse'
-
- PROTOCOL_EXTERNALS_ONLY = 'externals_only'
- PROTOCOL_GIT = 'git'
- PROTOCOL_SVN = 'svn'
- GIT_SUBMODULES_FILENAME = '.gitmodules'
- KNOWN_PRROTOCOLS = [PROTOCOL_GIT, PROTOCOL_SVN, PROTOCOL_EXTERNALS_ONLY]
-
- # v1 xml keywords
- _V1_TREE_PATH = 'TREE_PATH'
- _V1_ROOT = 'ROOT'
- _V1_TAG = 'TAG'
- _V1_BRANCH = 'BRANCH'
- _V1_REQ_SOURCE = 'REQ_SOURCE'
-
- # Dictionary keys are component names. The corresponding values are laid out
- # according to this schema.
- _source_schema = {REQUIRED: True,
- PATH: 'string',
- EXTERNALS: 'string',
- SUBMODULE : True,
- REPO: {PROTOCOL: 'string',
- REPO_URL: 'string',
- TAG: 'string',
- BRANCH: 'string',
- HASH: 'string',
- SPARSE: 'string',
- }
- }
-
- def __init__(self, parent_repo=None):
- """Convert the xml into a standardized dict that can be used to
- construct the source objects
-
- """
- dict.__init__(self)
-
- self._schema_major = None
- self._schema_minor = None
- self._schema_patch = None
- self._input_major = None
- self._input_minor = None
- self._input_patch = None
- self._parent_repo = parent_repo
-
- def _verify_schema_version(self):
- """Use semantic versioning rules to verify we can process this schema.
-
- """
- known = '{0}.{1}.{2}'.format(self._schema_major,
- self._schema_minor,
- self._schema_patch)
- received = '{0}.{1}.{2}'.format(self._input_major,
- self._input_minor,
- self._input_patch)
-
- if self._input_major != self._schema_major:
- # should never get here, the factory should handle this correctly!
- msg = ('DEV_ERROR: version "{0}" parser received '
- 'version "{1}" input.'.format(known, received))
- fatal_error(msg)
-
- if self._input_minor > self._schema_minor:
- msg = ('Incompatible schema version:\n'
- ' User supplied schema version "{0}" is too new."\n'
- ' Can only process version "{1}" files and '
- 'older.'.format(received, known))
- fatal_error(msg)
-
- if self._input_patch > self._schema_patch:
- # NOTE(bja, 2018-03) ignoring for now... Not clear what
- # conditions the test is needed.
- pass
-
- def _check_user_input(self):
- """Run a series of checks to attempt to validate the user input and
- detect errors as soon as possible.
-
- NOTE(bja, 2018-03) These checks are called *after* the file is
- read. That means the schema check can not occur here.
-
- Note: the order is important. check_optional will create
- optional with null data. run check_data first to ensure
- required data was provided correctly by the user.
-
- """
- self._check_data()
- self._check_optional()
- self._validate()
-
- def _check_data(self):
- # pylint: disable=too-many-branches,too-many-statements
- """Check user supplied data is valid where possible.
- """
- for ext_name in self.keys():
- if (self[ext_name][self.REPO][self.PROTOCOL]
- not in self.KNOWN_PRROTOCOLS):
- msg = 'Unknown repository protocol "{0}" in "{1}".'.format(
- self[ext_name][self.REPO][self.PROTOCOL], ext_name)
- fatal_error(msg)
-
- if (self[ext_name][self.REPO][self.PROTOCOL] ==
- self.PROTOCOL_SVN):
- if self.HASH in self[ext_name][self.REPO]:
- msg = ('In repo description for "{0}". svn repositories '
- 'may not include the "hash" keyword.'.format(
- ext_name))
- fatal_error(msg)
-
- if ((self[ext_name][self.REPO][self.PROTOCOL] != self.PROTOCOL_GIT)
- and (self.SUBMODULE in self[ext_name])):
- msg = ('self.SUBMODULE is only supported with {0} protocol, '
- '"{1}" is defined as an {2} repository')
- fatal_error(msg.format(self.PROTOCOL_GIT, ext_name,
- self[ext_name][self.REPO][self.PROTOCOL]))
-
- if (self[ext_name][self.REPO][self.PROTOCOL] !=
- self.PROTOCOL_EXTERNALS_ONLY):
- ref_count = 0
- found_refs = ''
- if self.TAG in self[ext_name][self.REPO]:
- ref_count += 1
- found_refs = '"{0} = {1}", {2}'.format(
- self.TAG, self[ext_name][self.REPO][self.TAG],
- found_refs)
- if self.BRANCH in self[ext_name][self.REPO]:
- ref_count += 1
- found_refs = '"{0} = {1}", {2}'.format(
- self.BRANCH, self[ext_name][self.REPO][self.BRANCH],
- found_refs)
- if self.HASH in self[ext_name][self.REPO]:
- ref_count += 1
- found_refs = '"{0} = {1}", {2}'.format(
- self.HASH, self[ext_name][self.REPO][self.HASH],
- found_refs)
- if (self.SUBMODULE in self[ext_name] and
- self[ext_name][self.SUBMODULE]):
- ref_count += 1
- found_refs = '"{0} = {1}", {2}'.format(
- self.SUBMODULE,
- self[ext_name][self.SUBMODULE], found_refs)
-
- if ref_count > 1:
- msg = 'Model description is over specified! '
- if self.SUBMODULE in self[ext_name]:
- msg += ('from_submodule is not compatible with '
- '"tag", "branch", or "hash" ')
- else:
- msg += (' Only one of "tag", "branch", or "hash" '
- 'may be specified ')
-
- msg += 'for repo description of "{0}".'.format(ext_name)
- msg = '{0}\nFound: {1}'.format(msg, found_refs)
- fatal_error(msg)
- elif ref_count < 1:
- msg = ('Model description is under specified! One of '
- '"tag", "branch", or "hash" must be specified for '
- 'repo description of "{0}"'.format(ext_name))
- fatal_error(msg)
-
- if (self.REPO_URL not in self[ext_name][self.REPO] and
- (self.SUBMODULE not in self[ext_name] or
- not self[ext_name][self.SUBMODULE])):
- msg = ('Model description is under specified! Must have '
- '"repo_url" in repo '
- 'description for "{0}"'.format(ext_name))
- fatal_error(msg)
-
- if (self.SUBMODULE in self[ext_name] and
- self[ext_name][self.SUBMODULE]):
- if self.REPO_URL in self[ext_name][self.REPO]:
- msg = ('Model description is over specified! '
- 'from_submodule keyword is not compatible '
- 'with {0} keyword for'.format(self.REPO_URL))
- msg = '{0} repo description of "{1}"'.format(msg,
- ext_name)
- fatal_error(msg)
-
- if self.PATH in self[ext_name]:
- msg = ('Model description is over specified! '
- 'from_submodule keyword is not compatible with '
- '{0} keyword for'.format(self.PATH))
- msg = '{0} repo description of "{1}"'.format(msg,
- ext_name)
- fatal_error(msg)
-
- if self.REPO_URL in self[ext_name][self.REPO]:
- url = expand_local_url(
- self[ext_name][self.REPO][self.REPO_URL], ext_name)
- self[ext_name][self.REPO][self.REPO_URL] = url
-
- def _check_optional(self):
- # pylint: disable=too-many-branches
- """Some fields like externals, repo:tag repo:branch are
- (conditionally) optional. We don't want the user to be
- required to enter them in every externals description file, but
- still want to validate the input. Check conditions and add
- default values if appropriate.
-
- """
- submod_desc = None # Only load submodules info once
- for field in self:
- # truly optional
- if self.EXTERNALS not in self[field]:
- self[field][self.EXTERNALS] = EMPTY_STR
-
- # git and svn repos must have tag and branch entries for validation purposes.
- if self.TAG not in self[field][self.REPO]:
- self[field][self.REPO][self.TAG] = EMPTY_STR
- if self.BRANCH not in self[field][self.REPO]:
- self[field][self.REPO][self.BRANCH] = EMPTY_STR
- if self.HASH not in self[field][self.REPO]:
- self[field][self.REPO][self.HASH] = EMPTY_STR
- if self.REPO_URL not in self[field][self.REPO]:
- self[field][self.REPO][self.REPO_URL] = EMPTY_STR
- if self.SPARSE not in self[field][self.REPO]:
- self[field][self.REPO][self.SPARSE] = EMPTY_STR
-
- # from_submodule has a complex relationship with other fields
- if self.SUBMODULE in self[field]:
- # User wants to use submodule information, is it available?
- if self._parent_repo is None:
- # No parent == no submodule information
- PPRINTER.pprint(self[field])
- msg = 'No parent submodule for "{0}"'.format(field)
- fatal_error(msg)
- elif self._parent_repo.protocol() != self.PROTOCOL_GIT:
- PPRINTER.pprint(self[field])
- msg = 'Parent protocol, "{0}", does not support submodules'
- fatal_error(msg.format(self._parent_repo.protocol()))
- else:
- args = self._repo_config_from_submodule(field, submod_desc)
- repo_url, repo_path, ref_hash, submod_desc = args
-
- if repo_url is None:
- msg = ('Cannot checkout "{0}" as a submodule, '
- 'repo not found in {1} file')
- fatal_error(msg.format(field,
- self.GIT_SUBMODULES_FILENAME))
- # Fill in submodule fields
- self[field][self.REPO][self.REPO_URL] = repo_url
- self[field][self.REPO][self.HASH] = ref_hash
- self[field][self.PATH] = repo_path
-
- if self[field][self.SUBMODULE]:
- # We should get everything from the parent submodule
- # configuration.
- pass
- # No else (from_submodule = False is the default)
- else:
- # Add the default value (not using submodule information)
- self[field][self.SUBMODULE] = False
-
- def _repo_config_from_submodule(self, field, submod_desc):
- """Find the external config information for a repository from
- its submodule configuration information.
- """
- if submod_desc is None:
- repo_path = os.getcwd() # Is this always correct?
- submod_file = self._parent_repo.submodules_file(repo_path=repo_path)
- if submod_file is None:
- msg = ('Cannot checkout "{0}" from submodule information\n'
- ' Parent repo, "{1}" does not have submodules')
- fatal_error(msg.format(field, self._parent_repo.name()))
-
- printlog(
- 'Processing submodules description file : {0} ({1})'.format(
- submod_file, repo_path))
- submod_model_data = _read_gitmodules_file(repo_path, submod_file)
- submod_desc = create_externals_description(submod_model_data)
-
- # Can we find our external?
- repo_url = None
- repo_path = None
- ref_hash = None
- for ext_field in submod_desc:
- if field == ext_field:
- ext = submod_desc[ext_field]
- repo_url = ext[self.REPO][self.REPO_URL]
- repo_path = ext[self.PATH]
- ref_hash = ext[self.REPO][self.HASH]
- break
-
- return repo_url, repo_path, ref_hash, submod_desc
-
- def _validate(self):
- """Validate that the parsed externals description contains all necessary
- fields.
-
- """
- def print_compare_difference(data_a, data_b, loc_a, loc_b):
- """Look through the data structures and print the differences.
-
- """
- for item in data_a:
- if item in data_b:
- if not isinstance(data_b[item], type(data_a[item])):
- printlog(" {item}: {loc} = {val} ({val_type})".format(
- item=item, loc=loc_a, val=data_a[item],
- val_type=type(data_a[item])))
- printlog(" {item} {loc} = {val} ({val_type})".format(
- item=' ' * len(item), loc=loc_b, val=data_b[item],
- val_type=type(data_b[item])))
- else:
- printlog(" {item}: {loc} = {val} ({val_type})".format(
- item=item, loc=loc_a, val=data_a[item],
- val_type=type(data_a[item])))
- printlog(" {item} {loc} missing".format(
- item=' ' * len(item), loc=loc_b))
-
- def validate_data_struct(schema, data):
- """Compare a data structure against a schema and validate all required
- fields are present.
-
- """
- is_valid = False
- in_ref = True
- valid = True
- if isinstance(schema, dict) and isinstance(data, dict):
- # Both are dicts, recursively verify that all fields
- # in schema are present in the data.
- for key in schema:
- in_ref = in_ref and (key in data)
- if in_ref:
- valid = valid and (
- validate_data_struct(schema[key], data[key]))
-
- is_valid = in_ref and valid
- else:
- # non-recursive structure. verify data and schema have
- # the same type.
- is_valid = isinstance(data, type(schema))
-
- if not is_valid:
- printlog(" Unmatched schema and input:")
- if isinstance(schema, dict):
- print_compare_difference(schema, data, 'schema', 'input')
- print_compare_difference(data, schema, 'input', 'schema')
- else:
- printlog(" schema = {0} ({1})".format(
- schema, type(schema)))
- printlog(" input = {0} ({1})".format(data, type(data)))
-
- return is_valid
-
- for field in self:
- valid = validate_data_struct(self._source_schema, self[field])
- if not valid:
- PPRINTER.pprint(self._source_schema)
- PPRINTER.pprint(self[field])
- msg = 'ERROR: source for "{0}" did not validate'.format(field)
- fatal_error(msg)
-
-
-class ExternalsDescriptionDict(ExternalsDescription):
- """Create a externals description object from a dictionary using the API
- representations. Primarily used to simplify creating model
- description files for unit testing.
-
- """
-
- def __init__(self, model_data, components=None, exclude=None):
- """Parse a native dictionary into a externals description.
- """
- ExternalsDescription.__init__(self)
- self._schema_major = 1
- self._schema_minor = 0
- self._schema_patch = 0
- self._input_major = 1
- self._input_minor = 0
- self._input_patch = 0
- self._verify_schema_version()
- if components:
- for key in list(model_data.keys()):
- if key not in components:
- del model_data[key]
-
- if exclude:
- for key in list(model_data.keys()):
- if key in exclude:
- del model_data[key]
-
- self.update(model_data)
- self._check_user_input()
-
-
-class ExternalsDescriptionConfigV1(ExternalsDescription):
- """Create a externals description object from a config_parser object,
- schema version 1.
-
- """
-
- def __init__(self, model_data, components=None, exclude=None, parent_repo=None):
- """Convert the config data into a standardized dict that can be used to
- construct the source objects
-
- components: list of component names to include, None to include all.
- exclude: list of component names to skip.
- """
- ExternalsDescription.__init__(self, parent_repo=parent_repo)
- self._schema_major = 1
- self._schema_minor = 1
- self._schema_patch = 0
- self._input_major, self._input_minor, self._input_patch = \
- get_cfg_schema_version(model_data)
- self._verify_schema_version()
- self._remove_metadata(model_data)
- self._parse_cfg(model_data, components=components, exclude=exclude)
- self._check_user_input()
-
- @staticmethod
- def _remove_metadata(model_data):
- """Remove the metadata section from the model configuration file so
- that it is simpler to look through the file and construct the
- externals description.
-
- """
- model_data.remove_section(DESCRIPTION_SECTION)
-
- def _parse_cfg(self, cfg_data, components=None, exclude=None):
- """Parse a config_parser object into a externals description.
-
- components: list of component names to include, None to include all.
- exclude: list of component names to skip.
- """
- def list_to_dict(input_list, convert_to_lower_case=True):
- """Convert a list of key-value pairs into a dictionary.
- """
- output_dict = {}
- for item in input_list:
- key = config_string_cleaner(item[0].strip())
- value = config_string_cleaner(item[1].strip())
- if convert_to_lower_case:
- key = key.lower()
- output_dict[key] = value
- return output_dict
-
- for section in cfg_data.sections():
- name = config_string_cleaner(section.lower().strip())
- if (components and name not in components) or (exclude and name in exclude):
- continue
- self[name] = {}
- self[name].update(list_to_dict(cfg_data.items(section)))
- self[name][self.REPO] = {}
- loop_keys = self[name].copy().keys()
- for item in loop_keys:
- if item in self._source_schema:
- if isinstance(self._source_schema[item], bool):
- self[name][item] = str_to_bool(self[name][item])
- elif item in self._source_schema[self.REPO]:
- self[name][self.REPO][item] = self[name][item]
- del self[name][item]
- else:
- msg = ('Invalid input: "{sect}" contains unknown '
- 'item "{item}".'.format(sect=name, item=item))
- fatal_error(msg)
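
A hypothetical sketch of this module's entry points operating on cfg text held
in memory rather than a file on disk (the component name, URL, and tag are
placeholders; section and option names follow the schema documented above):

```python
# Hypothetical sketch: build an externals description from in-memory cfg text,
# mirroring read_externals_description_file + create_externals_description.
from configparser import ConfigParser

from manic.externals_description import create_externals_description

CFG = """
[externals_description]
schema_version = 1.0.0

[cime]
protocol = git
repo_url = git@github.com:esmci/cime.git
tag = some-tag
local_path = cime
required = True
"""

config = ConfigParser()
config.read_string(CFG)
ext = create_externals_description(config, model_format='cfg')
# _parse_cfg gathers repo-level options under the 'repo' sub-dict:
print(ext['cime']['repo']['repo_url'])  # git@github.com:esmci/cime.git
print(ext['cime']['local_path'])        # cime
```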
diff --git a/manage_externals/manic/externals_status.py b/manage_externals/manic/externals_status.py
deleted file mode 100644
index 6bc29e9732..0000000000
--- a/manage_externals/manic/externals_status.py
+++ /dev/null
@@ -1,164 +0,0 @@
-"""ExternalStatus
-
-Class to store status and state information about repositories and
-create a string representation.
-
-"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-from .global_constants import EMPTY_STR
-from .utils import printlog, indent_string
-from .global_constants import VERBOSITY_VERBOSE, VERBOSITY_DUMP
-
-
-class ExternalStatus(object):
- """Class to represent the status of a given source repository or tree.
-
- Individual repositories determine their own status in the
- Repository objects. This object is just responsible for storing the
- information and passing it up to a higher level for reporting or
- global decisions.
-
- There are two states of concern:
-
- * If the repository is in-sync with the externals description file.
-
- * If the repository working copy is clean and there are no pending
- transactions (e.g. add, remove, rename, untracked files).
-
- """
- # sync_state and clean_state can be one of the following:
- DEFAULT = '-' # not set yet (sync_state). clean_state can be this if sync_state is EMPTY.
- UNKNOWN = '?'
- EMPTY = 'e'
- MODEL_MODIFIED = 's' # repo version != externals (sync_state only)
- DIRTY = 'M' # repo is dirty (clean_state only)
- STATUS_OK = ' ' # repo is clean (clean_state) or matches externals version (sync_state)
- STATUS_ERROR = '!'
-
- # source_type can be one of the following:
- OPTIONAL = 'o'
- STANDALONE = 's'
- MANAGED = ' '
-
- def __init__(self):
- self.sync_state = self.DEFAULT
- self.clean_state = self.DEFAULT
- self.source_type = self.DEFAULT
- self.path = EMPTY_STR
- self.current_version = EMPTY_STR
- self.expected_version = EMPTY_STR
- self.status_output = EMPTY_STR
-
- def log_status_message(self, verbosity):
- """Write status message to the screen and log file
- """
- printlog(self._default_status_message())
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(self._verbose_status_message())
- if verbosity >= VERBOSITY_DUMP:
- printlog(self._dump_status_message())
-
- def __repr__(self):
- return self._default_status_message()
-
- def _default_status_message(self):
- """Return the default terse status message string
- """
- return '{sync}{clean}{src_type} {path}'.format(
- sync=self.sync_state, clean=self.clean_state,
- src_type=self.source_type, path=self.path)
-
- def _verbose_status_message(self):
- """Return the verbose status message string
- """
- clean_str = self.DEFAULT
- if self.clean_state == self.STATUS_OK:
- clean_str = 'clean sandbox'
- elif self.clean_state == self.DIRTY:
- clean_str = 'modified sandbox'
-
- sync_str = 'on {0}'.format(self.current_version)
- if self.sync_state != self.STATUS_OK:
- sync_str = '{current} --> {expected}'.format(
- current=self.current_version, expected=self.expected_version)
- return ' {clean}, {sync}'.format(clean=clean_str, sync=sync_str)
-
- def _dump_status_message(self):
- """Return the dump status message string
- """
- return indent_string(self.status_output, 12)
-
- def safe_to_update(self):
- """Report if it is safe to update a repository. Safe is defined as:
-
- * If a repository is empty, it is safe to update.
-
- * If a repository exists and has a clean working copy state
- with no pending transactions.
-
- """
- safe_to_update = False
- repo_exists = self.exists()
- if not repo_exists:
- safe_to_update = True
- else:
- # If the repo exists, it must be in ok or modified
- # sync_state. Any other sync_state at this point
- # represents a logic error that should have been handled
- # before now!
- sync_safe = ((self.sync_state == ExternalStatus.STATUS_OK) or
- (self.sync_state == ExternalStatus.MODEL_MODIFIED))
- if sync_safe:
- # The clean_state must be STATUS_OK to update. Otherwise we
- # are dirty or there was a missed error previously.
- if self.clean_state == ExternalStatus.STATUS_OK:
- safe_to_update = True
- return safe_to_update
-
- def exists(self):
- """Determine if the repo exists. This is indicated by:
-
- * sync_state is not EMPTY
-
- * if the sync_state is empty, then the valid states for
- clean_state are default, empty or unknown. Anything else
- and there was probably an internal logic error.
-
- NOTE(bja, 2017-10) For the moment we are considering a
- sync_state of default or unknown to require user intervention,
- but we may want to relax this convention. This is probably a
- result of a network error or internal logic error but more
- testing is needed.
-
- """
- is_empty = (self.sync_state == ExternalStatus.EMPTY)
- clean_valid = ((self.clean_state == ExternalStatus.DEFAULT) or
- (self.clean_state == ExternalStatus.EMPTY) or
- (self.clean_state == ExternalStatus.UNKNOWN))
-
- if is_empty and clean_valid:
- exists = False
- else:
- exists = True
- return exists
-
-
-def check_safe_to_update_repos(tree_status):
- """Check if *ALL* repositories are in a safe state to update. We don't
- want to do a partial update of the repositories then die, leaving
- the model in an inconsistent state.
-
- Note: if there is an update to do, the repositories will by
- definition be out of sync with the externals description, so we
- can't use that as a criterion for updating.
-
- """
- safe_to_update = True
- for comp in tree_status:
- stat = tree_status[comp]
- safe_to_update &= stat.safe_to_update()
-
- return safe_to_update
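
A minimal usage sketch of the flags above (assuming the manic package is
importable; the path and states are illustrative):

    from manic.externals_status import ExternalStatus, check_safe_to_update_repos

    stat = ExternalStatus()
    stat.sync_state = ExternalStatus.MODEL_MODIFIED  # 's': ref != externals
    stat.clean_state = ExternalStatus.DIRTY          # 'M': local modifications
    stat.source_type = ExternalStatus.MANAGED        # ' ': managed external
    stat.path = 'components/mom'

    print(stat)                   # terse report: "sM  components/mom"
    print(stat.safe_to_update())  # False: dirty sandboxes are never updated

    # Tree-level gate: every repository must be safe before any is touched.
    print(check_safe_to_update_repos({'mom': stat}))  # False
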
diff --git a/manage_externals/manic/global_constants.py b/manage_externals/manic/global_constants.py
deleted file mode 100644
index 0e91cffc90..0000000000
--- a/manage_externals/manic/global_constants.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Globals shared across modules
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import pprint
-
-EMPTY_STR = ''
-LOCAL_PATH_INDICATOR = '.'
-VERSION_SEPERATOR = '.'
-LOG_FILE_NAME = 'manage_externals.log'
-PPRINTER = pprint.PrettyPrinter(indent=4)
-
-VERBOSITY_DEFAULT = 0
-VERBOSITY_VERBOSE = 1
-VERBOSITY_DUMP = 2
diff --git a/manage_externals/manic/repository.py b/manage_externals/manic/repository.py
deleted file mode 100644
index ea4230fb7b..0000000000
--- a/manage_externals/manic/repository.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Base class representation of a repository
-"""
-
-from .externals_description import ExternalsDescription
-from .utils import fatal_error
-from .global_constants import EMPTY_STR
-
-
-class Repository(object):
- """
- Class to represent and operate on a repository description.
- """
-
- def __init__(self, component_name, repo):
- """
- Parse repo externals description
- """
- self._name = component_name
- self._protocol = repo[ExternalsDescription.PROTOCOL]
- self._tag = repo[ExternalsDescription.TAG]
- self._branch = repo[ExternalsDescription.BRANCH]
- self._hash = repo[ExternalsDescription.HASH]
- self._url = repo[ExternalsDescription.REPO_URL]
- self._sparse = repo[ExternalsDescription.SPARSE]
-
- if self._url == EMPTY_STR:
- fatal_error('repo must have a URL')
-
- if ((self._tag == EMPTY_STR) and (self._branch == EMPTY_STR) and
- (self._hash == EMPTY_STR)):
- fatal_error('{0} repo must have a branch, tag or hash '
- 'element'.format(self._name))
-
- ref_count = 0
- if self._tag != EMPTY_STR:
- ref_count += 1
- if self._branch != EMPTY_STR:
- ref_count += 1
- if self._hash != EMPTY_STR:
- ref_count += 1
- if ref_count != 1:
- fatal_error('repo {0} must have exactly one of '
- 'tag, branch or hash.'.format(self._name))
-
- def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): # pylint: disable=unused-argument
- """
- If the repo destination directory exists, ensure it is correct (from
- correct URL, correct branch or tag), and possibly update the source.
- If the repo destination directory does not exist, checkout the correct
- branch or tag.
- NB: 'recursive' is included as an argument for compatibility with
- git functionality (repository_git.py)
- """
- msg = ('DEV_ERROR: checkout method must be implemented in all '
- 'repository classes! {0}'.format(self.__class__.__name__))
- fatal_error(msg)
-
- def status(self, stat, repo_dir_path): # pylint: disable=unused-argument
- """Report the status of the repo
-
- """
- msg = ('DEV_ERROR: status method must be implemented in all '
- 'repository classes! {0}'.format(self.__class__.__name__))
- fatal_error(msg)
-
- def submodules_file(self, repo_path=None):
- # pylint: disable=no-self-use,unused-argument
- """Stub for use by non-git VC systems"""
- return None
-
- def url(self):
- """Public access of repo url.
- """
- return self._url
-
- def tag(self):
- """Public access of repo tag
- """
- return self._tag
-
- def branch(self):
- """Public access of repo branch.
- """
- return self._branch
-
- def hash(self):
- """Public access of repo hash.
- """
- return self._hash
-
- def name(self):
- """Public access of repo name.
- """
- return self._name
-
- def protocol(self):
- """Public access of repo protocol.
- """
- return self._protocol
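
The base class is a contract: concrete backends must override checkout()
and status(), while the accessors are shared. A hedged sketch of the
minimal subclass shape (NullRepository is illustrative, not part of
manage_externals):

    from manic.repository import Repository

    class NullRepository(Repository):
        """Backend that never touches the filesystem (illustration only)."""

        def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive):
            pass  # a real backend clones or switches the sandbox here

        def status(self, stat, repo_dir_path):
            pass  # a real backend fills in stat.sync_state/clean_state here
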
diff --git a/manage_externals/manic/repository_factory.py b/manage_externals/manic/repository_factory.py
deleted file mode 100644
index 18c73ffc4b..0000000000
--- a/manage_externals/manic/repository_factory.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Factory for creating and initializing the appropriate repository class
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-from .repository_git import GitRepository
-from .repository_svn import SvnRepository
-from .externals_description import ExternalsDescription
-from .utils import fatal_error
-
-
-def create_repository(component_name, repo_info, svn_ignore_ancestry=False):
- """Determine what type of repository we have, i.e. git or svn, and
- create the appropriate object.
-
- Can return None (e.g. if protocol is 'externals_only').
- """
- protocol = repo_info[ExternalsDescription.PROTOCOL].lower()
- if protocol == 'git':
- repo = GitRepository(component_name, repo_info)
- elif protocol == 'svn':
- repo = SvnRepository(component_name, repo_info, ignore_ancestry=svn_ignore_ancestry)
- elif protocol == 'externals_only':
- repo = None
- else:
- msg = 'Unknown repo protocol "{0}"'.format(protocol)
- fatal_error(msg)
- return repo
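
A hedged sketch of how the factory is fed (the field constants come from
ExternalsDescription as used in repository.py above; the component name,
URL and tag are illustrative):

    from manic.externals_description import ExternalsDescription
    from manic.repository_factory import create_repository

    # One 'repo' sub-dictionary, as produced by the externals parser.
    repo_info = {
        ExternalsDescription.PROTOCOL: 'git',
        ExternalsDescription.REPO_URL: 'https://github.com/ESCOMP/CTSM',
        ExternalsDescription.TAG: 'ctsm5.1.0',
        ExternalsDescription.BRANCH: '',
        ExternalsDescription.HASH: '',
        ExternalsDescription.SPARSE: '',
    }
    repo = create_repository('ctsm', repo_info)  # returns a GitRepository
    assert repo.protocol() == 'git' and repo.tag() == 'ctsm5.1.0'
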
diff --git a/manage_externals/manic/repository_git.py b/manage_externals/manic/repository_git.py
deleted file mode 100644
index aab1a468a8..0000000000
--- a/manage_externals/manic/repository_git.py
+++ /dev/null
@@ -1,859 +0,0 @@
-"""Class for interacting with git repositories
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import copy
-import os
-import sys
-
-from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR
-from .global_constants import VERBOSITY_VERBOSE
-from .repository import Repository
-from .externals_status import ExternalStatus
-from .externals_description import ExternalsDescription, git_submodule_status
-from .utils import expand_local_url, split_remote_url, is_remote_url
-from .utils import fatal_error, printlog
-from .utils import execute_subprocess
-
-
-class GitRepository(Repository):
- """Class to represent and operate on a repository description.
-
- For testing purposes, all system calls to git should:
-
- * be isolated in separate functions with no application logic
- * of the form:
- - cmd = 'git -C {dirname} ...'.format(dirname=dirname).split()
- - value = execute_subprocess(cmd, output_to_caller={T|F},
- status_to_caller={T|F})
- - return value
- * be static methods (not rely on self)
- * be named _git_subcommand_args(user_args)
-
- This convention allows easy unit testing of the repository logic
- by mocking the specific calls to return predefined results.
-
- """
-
- def __init__(self, component_name, repo):
- """
- repo: ExternalsDescription.
- """
- Repository.__init__(self, component_name, repo)
- self._gitmodules = None
- self._submods = None
-
- # ----------------------------------------------------------------
- #
- # Public API, defined by Repository
- #
- # ----------------------------------------------------------------
- def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive):
- """
- If the repo destination directory exists, ensure it is correct (from
- correct URL, correct branch or tag), and possibly update the source.
- If the repo destination directory does not exist, checkout the correct
- branch or tag.
- """
- repo_dir_path = os.path.join(base_dir_path, repo_dir_name)
- repo_dir_exists = os.path.exists(repo_dir_path)
- if (repo_dir_exists and not os.listdir(
- repo_dir_path)) or not repo_dir_exists:
- self._clone_repo(base_dir_path, repo_dir_name, verbosity)
- self._checkout_ref(repo_dir_path, verbosity, recursive)
- gmpath = os.path.join(repo_dir_path,
- ExternalsDescription.GIT_SUBMODULES_FILENAME)
- if os.path.exists(gmpath):
- self._gitmodules = gmpath
- self._submods = git_submodule_status(repo_dir_path)
- else:
- self._gitmodules = None
- self._submods = None
-
- def status(self, stat, repo_dir_path):
- """
- If the repo destination directory exists, ensure it is correct (from
- correct URL, correct branch or tag), and possibly update the source.
- If the repo destination directory does not exist, checkout the correct
- branch or tag.
- """
- self._check_sync(stat, repo_dir_path)
- if os.path.exists(repo_dir_path):
- self._status_summary(stat, repo_dir_path)
-
- def submodules_file(self, repo_path=None):
- if repo_path is not None:
- gmpath = os.path.join(repo_path,
- ExternalsDescription.GIT_SUBMODULES_FILENAME)
- if os.path.exists(gmpath):
- self._gitmodules = gmpath
- self._submods = git_submodule_status(repo_path)
-
- return self._gitmodules
-
- # ----------------------------------------------------------------
- #
- # Internal work functions
- #
- # ----------------------------------------------------------------
- def _clone_repo(self, base_dir_path, repo_dir_name, verbosity):
- """Clones repo_dir_name into base_dir_path.
- """
- self._git_clone(self._url, os.path.join(base_dir_path, repo_dir_name),
- verbosity=verbosity)
-
- def _current_ref(self, dirname):
- """Determine the *name* associated with HEAD at dirname.
-
- If we're on a tag, then returns the tag name; otherwise, returns
- the current hash. Returns an empty string if no reference can be
- determined (e.g., if we're not actually in a git repository).
-
- If we're on a branch, then the branch name is also included in
- the returned string (in addition to the tag / hash).
- """
- ref_found = False
-
- # If we're exactly at a tag, use that as the current ref
- tag_found, tag_name = self._git_current_tag(dirname)
- if tag_found:
- current_ref = tag_name
- ref_found = True
-
- if not ref_found:
- # Otherwise, use current hash as the current ref
- hash_found, hash_name = self._git_current_hash(dirname)
- if hash_found:
- current_ref = hash_name
- ref_found = True
-
- if ref_found:
- # If we're on a branch, include branch name in current ref
- branch_found, branch_name = self._git_current_branch(dirname)
- if branch_found:
- current_ref = "{} (branch {})".format(current_ref, branch_name)
- else:
- # If we still can't find a ref, return empty string. This
- # can happen if we're not actually in a git repo
- current_ref = ''
-
- return current_ref
-
- def _check_sync(self, stat, repo_dir_path):
- """Determine whether a git repository is in-sync with the model
- description.
-
- Because repos can have multiple remotes, the only criterion is
- whether the branch or tag is the same.
-
- """
- if not os.path.exists(repo_dir_path):
- # NOTE(bja, 2017-10) condition should have been determined
- # by _Source() object and should never be here!
- stat.sync_state = ExternalStatus.STATUS_ERROR
- else:
- git_dir = os.path.join(repo_dir_path, '.git')
- if not os.path.exists(git_dir):
- # NOTE(bja, 2017-10) directory exists, but no git repo
- # info.... Can't test with subprocess git command
- # because git will move up directory tree until it
- # finds the parent repo git dir!
- stat.sync_state = ExternalStatus.UNKNOWN
- else:
- self._check_sync_logic(stat, repo_dir_path)
-
- def _check_sync_logic(self, stat, repo_dir_path):
- """Compare the underlying hashes of the currently checkout ref and the
- expected ref.
-
- Output: sets the sync_state as well as the current and
- expected ref in the input status object.
-
- """
- def compare_refs(current_ref, expected_ref):
- """Compare the current and expected ref.
-
- """
- if current_ref == expected_ref:
- status = ExternalStatus.STATUS_OK
- else:
- status = ExternalStatus.MODEL_MODIFIED
- return status
-
- # get the full hash of the current commit
- _, current_ref = self._git_current_hash(repo_dir_path)
-
- if self._branch:
- if self._url == LOCAL_PATH_INDICATOR:
- expected_ref = self._branch
- else:
- remote_name = self._remote_name_for_url(self._url,
- repo_dir_path)
- if not remote_name:
- # git doesn't know about this remote. by definition
- # this is a modified state.
- expected_ref = "unknown_remote/{0}".format(self._branch)
- else:
- expected_ref = "{0}/{1}".format(remote_name, self._branch)
- elif self._hash:
- expected_ref = self._hash
- elif self._tag:
- expected_ref = self._tag
- else:
- msg = 'In repo "{0}": none of branch, hash or tag are set'.format(
- self._name)
- fatal_error(msg)
-
- # record the *names* of the current and expected branches
- stat.current_version = self._current_ref(repo_dir_path)
- stat.expected_version = copy.deepcopy(expected_ref)
-
- if current_ref == EMPTY_STR:
- stat.sync_state = ExternalStatus.UNKNOWN
- else:
- # get the underlying hash of the expected ref
- revparse_status, expected_ref_hash = self._git_revparse_commit(
- expected_ref, repo_dir_path)
- if revparse_status:
- # We failed to get the hash associated with
- # expected_ref. Maybe we should assign this to some special
- # status, but for now we're just calling this out-of-sync to
- # remain consistent with how this worked before.
- stat.sync_state = ExternalStatus.MODEL_MODIFIED
- else:
- # compare the underlying hashes
- stat.sync_state = compare_refs(current_ref, expected_ref_hash)
-
- @classmethod
- def _remote_name_for_url(cls, remote_url, dirname):
- """Return the remote name matching remote_url (or None)
-
- """
- git_output = cls._git_remote_verbose(dirname)
- git_output = git_output.splitlines()
- for line in git_output:
- data = line.strip()
- if not data:
- continue
- data = data.split()
- name = data[0].strip()
- url = data[1].strip()
- if remote_url == url:
- return name
- return None
-
- def _create_remote_name(self):
- """The url specified in the externals description file was not known
- to git. We need to add it, which means adding a unique and
- safe name....
-
- The assigned name needs to be safe for git to use, e.g. can't
- look like a path 'foo/bar' and work with both remote and local paths.
-
- Remote paths include but are not limited to: git, ssh, https,
- github, gitlab, bitbucket, custom server, etc.
-
- Local paths can be relative or absolute. They may contain
- shell variables, e.g. ${REPO_ROOT}/repo_name, or username
- expansion, i.e. ~/ or ~someuser/.
-
- Relative paths must include at least one layer of redirection, i.e.
- container/../ext_repo, but may be many layers deep, e.g.
- container/../../../../../ext_repo
-
- NOTE(bja, 2017-11)
-
- The base name below may not be unique, for example if the
- user has local paths like:
-
- /path/to/my/repos/nice_repo
- /path/to/other/repos/nice_repo
-
- But the current implementation should cover most common
- use cases for remotes and still provide usable names.
-
- """
- url = copy.deepcopy(self._url)
- if is_remote_url(url):
- url = split_remote_url(url)
- else:
- url = expand_local_url(url, self._name)
- url = url.split('/')
- repo_name = url[-1]
- base_name = url[-2]
- # repo name should nominally already be something that git can
- # deal with. We need to remove other possibly troublesome
- # punctuation, e.g. /, $, from the base name.
- unsafe_characters = '!@#$%^&*()[]{}\\/,;~'
- for unsafe in unsafe_characters:
- base_name = base_name.replace(unsafe, '')
- remote_name = "{0}_{1}".format(base_name, repo_name)
- return remote_name
-
- def _checkout_ref(self, repo_dir, verbosity, submodules):
- """Checkout the user supplied reference
- if is True, recursively initialize and update
- the repo's submodules
- """
- if self._url.strip() == LOCAL_PATH_INDICATOR:
- self._checkout_local_ref(verbosity, submodules, repo_dir)
- else:
- self._checkout_external_ref(verbosity, submodules, repo_dir)
-
- if self._sparse:
- self._sparse_checkout(repo_dir, verbosity)
-
-
- def _checkout_local_ref(self, verbosity, submodules, dirname):
- """Checkout the reference considering the local repo only. Do not
- fetch any additional remotes or specify the remote when
- checking out the ref.
- If submodules is True, recursively initialize and update
- the repo's submodules.
- """
- if self._tag:
- ref = self._tag
- elif self._branch:
- ref = self._branch
- else:
- ref = self._hash
-
- self._check_for_valid_ref(ref, remote_name=None,
- dirname=dirname)
- self._git_checkout_ref(ref, verbosity, submodules, dirname)
-
- def _checkout_external_ref(self, verbosity, submodules, dirname):
- """Checkout the reference from a remote repository into dirname.
- If submodules is True, recursively initialize and update
- the repo's submodules.
- Note that this results in a 'detached HEAD' state if checking out
- a branch, because we check out the remote branch rather than the
- local. See https://github.com/ESMCI/manage_externals/issues/34 for
- more discussion.
- """
- if self._tag:
- ref = self._tag
- elif self._branch:
- ref = self._branch
- else:
- ref = self._hash
-
- remote_name = self._remote_name_for_url(self._url, dirname)
- if not remote_name:
- remote_name = self._create_remote_name()
- self._git_remote_add(remote_name, self._url, dirname)
- self._git_fetch(remote_name, dirname)
-
- # NOTE(bja, 2018-03) we need to send separate ref and remote
- # name to check_for_valid_ref, but the combined name to
- # checkout_ref!
- self._check_for_valid_ref(ref, remote_name, dirname)
-
- if self._branch:
- # Prepend remote name to branch. This means we avoid various
- # special cases if the local branch is not tracking the remote or
- # cannot be trivially fast-forwarded to match; but, it also
- # means we end up in a 'detached HEAD' state.
- ref = '{0}/{1}'.format(remote_name, ref)
- self._git_checkout_ref(ref, verbosity, submodules, dirname)
-
- def _sparse_checkout(self, repo_dir, verbosity):
- """Use git read-tree to thin the working tree."""
- cmd = ['cp', os.path.join(repo_dir, self._sparse),
- os.path.join(repo_dir,
- '.git/info/sparse-checkout')]
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
- self._git_sparse_checkout(verbosity, repo_dir)
-
- def _check_for_valid_ref(self, ref, remote_name, dirname):
- """Try some basic sanity checks on the user supplied reference so we
- can provide a more useful error message than calledprocess
- error...
-
- remote_name can be NOne
- """
- is_tag = self._ref_is_tag(ref, dirname)
- is_branch = self._ref_is_branch(ref, remote_name, dirname)
- is_hash = self._ref_is_hash(ref, dirname)
- is_valid = is_tag or is_branch or is_hash
- if not is_valid:
- msg = ('In repo "{0}": reference "{1}" does not appear to be a '
- 'valid tag, branch or hash! Please verify the reference '
- 'name (e.g. spelling), is available from: {2} '.format(
- self._name, ref, self._url))
- fatal_error(msg)
-
- if is_tag:
- is_unique_tag, msg = self._is_unique_tag(ref, remote_name,
- dirname)
- if not is_unique_tag:
- msg = ('In repo "{0}": tag "{1}" {2}'.format(
- self._name, self._tag, msg))
- fatal_error(msg)
-
- return is_valid
-
- def _is_unique_tag(self, ref, remote_name, dirname):
- """Verify that a reference is a valid tag and is unique (not a branch)
-
- Tags may be tag names or SHA ids. It is also possible that a
- branch and a tag have the same name.
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
-
- """
- is_tag = self._ref_is_tag(ref, dirname)
- is_branch = self._ref_is_branch(ref, remote_name, dirname)
- is_hash = self._ref_is_hash(ref, dirname)
-
- msg = ''
- is_unique_tag = False
- if is_tag and not is_branch:
- # unique tag
- msg = 'is ok'
- is_unique_tag = True
- elif is_tag and is_branch:
- msg = ('is both a branch and a tag. git may checkout the branch '
- 'instead of the tag depending on your version of git.')
- is_unique_tag = False
- elif not is_tag and is_branch:
- msg = ('is a branch, and not a tag. If you intended to checkout '
- 'a branch, please change the externals description to be '
- 'a branch. If you intended to checkout a tag, it does not '
- 'exist. Please check the name.')
- is_unique_tag = False
- else: # not is_tag and not is_branch:
- if is_hash:
- # probably a sha1 or HEAD, etc, we call it a tag
- msg = 'is ok'
- is_unique_tag = True
- else:
- # undetermined state.
- msg = ('does not appear to be a valid tag, branch or hash! '
- 'Please check the name and repository.')
- is_unique_tag = False
-
- return is_unique_tag, msg
-
- def _ref_is_tag(self, ref, dirname):
- """Verify that a reference is a valid tag according to git.
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
- """
- is_tag = False
- value = self._git_showref_tag(ref, dirname)
- if value == 0:
- is_tag = True
- return is_tag
-
- def _ref_is_branch(self, ref, remote_name, dirname):
- """Verify if a ref is any kind of branch (local, tracked remote,
- untracked remote).
-
- remote_name can be None.
- """
- local_branch = False
- remote_branch = False
- if remote_name:
- remote_branch = self._ref_is_remote_branch(ref, remote_name,
- dirname)
- local_branch = self._ref_is_local_branch(ref, dirname)
-
- is_branch = False
- if local_branch or remote_branch:
- is_branch = True
- return is_branch
-
- def _ref_is_local_branch(self, ref, dirname):
- """Verify that a reference is a valid branch according to git.
-
- show-ref branch returns local branches that have been
- previously checked out. It will not necessarily pick up
- untracked remote branches.
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
-
- """
- is_branch = False
- value = self._git_showref_branch(ref, dirname)
- if value == 0:
- is_branch = True
- return is_branch
-
- def _ref_is_remote_branch(self, ref, remote_name, dirname):
- """Verify that a reference is a valid branch according to git.
-
- show-ref branch returns local branches that have been
- previously checked out. It will not necessarily pick up
- untracked remote branches.
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
-
- """
- is_branch = False
- value = self._git_lsremote_branch(ref, remote_name, dirname)
- if value == 0:
- is_branch = True
- return is_branch
-
- def _ref_is_commit(self, ref, dirname):
- """Verify that a reference is a valid commit according to git.
-
- This could be a tag, branch, sha1 id, HEAD and potentially others...
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
- """
- is_commit = False
- value, _ = self._git_revparse_commit(ref, dirname)
- if value == 0:
- is_commit = True
- return is_commit
-
- def _ref_is_hash(self, ref, dirname):
- """Verify that a reference is a valid hash according to git.
-
- Git doesn't seem to provide an exact way to determine if user
- supplied reference is an actual hash. So we verify that the
- ref is a valid commit and return the underlying commit
- hash. Then check that the commit hash begins with the user
- supplied string.
-
- Note: values returned by git_showref_* and git_revparse are
- shell return codes, which are zero for success, non-zero for
- error!
-
- """
- is_hash = False
- status, git_output = self._git_revparse_commit(ref, dirname)
- if status == 0:
- if git_output.strip().startswith(ref):
- is_hash = True
- return is_hash
-
- def _status_summary(self, stat, repo_dir_path):
- """Determine the clean/dirty status of a git repository
-
- """
- git_output = self._git_status_porcelain_v1z(repo_dir_path)
- is_dirty = self._status_v1z_is_dirty(git_output)
- if is_dirty:
- stat.clean_state = ExternalStatus.DIRTY
- else:
- stat.clean_state = ExternalStatus.STATUS_OK
-
- # Now save the verbose status output in case the user wants to
- # see it.
- stat.status_output = self._git_status_verbose(repo_dir_path)
-
- @staticmethod
- def _status_v1z_is_dirty(git_output):
- """Parse the git status output from --porcelain=v1 -z and determine if
- the repo status is clean or dirty. Dirty means:
-
- * modified files
- * missing files
- * added files
- * removed files
- * renamed files
- * unmerged files
-
- Whether untracked files are considered depends on how the status
- command was run (i.e., whether it was run with the '-u' option).
-
- NOTE: Based on the above definition, the porcelain status
- should be an empty string to be considered 'clean'. Of course
- this assumes we only get an empty string from a status
- command on a clean checkout, and not some error
- condition... Could also use 'git diff --quiet'.
-
- """
- is_dirty = False
- if git_output:
- is_dirty = True
- return is_dirty
-
- # ----------------------------------------------------------------
- #
- # system call to git for information gathering
- #
- # ----------------------------------------------------------------
- @staticmethod
- def _git_current_hash(dirname):
- """Return the full hash of the currently checked-out version.
-
- Returns a tuple, (hash_found, hash), where hash_found is a
- logical specifying whether a hash was found for HEAD (False
- could mean we're not in a git repository at all). (If hash_found
- is False, then hash is ''.)
- """
- status, git_output = GitRepository._git_revparse_commit("HEAD",
- dirname)
- hash_found = not status
- if not hash_found:
- git_output = ''
- return hash_found, git_output
-
- @staticmethod
- def _git_current_remote_branch(dirname):
- """Determines the name of the current remote branch, if any.
-
- If dirname is None, uses the cwd.
-
- Returns a tuple, (branch_found, branch_name), where branch_found
- is a bool specifying whether a branch name was found for
- HEAD. (If branch_found is False, then branch_name is '').
- branch_name is in the format '$remote/$branch', e.g. 'origin/foo'.
- """
- branch_found = False
- branch_name = ''
-
- cmd = 'git -C {dirname} log -n 1 --pretty=%d HEAD'.format(
- dirname=dirname).split()
- status, git_output = execute_subprocess(cmd,
- output_to_caller=True,
- status_to_caller=True)
- branch_found = 'HEAD,' in git_output
- if branch_found:
- # git_output is of the form " (HEAD, origin/blah)"
- branch_name = git_output.split(',')[1].strip()[:-1]
- return branch_found, branch_name
-
- @staticmethod
- def _git_current_branch(dirname):
- """Determines the name of the current local branch.
-
- Returns a tuple, (branch_found, branch_name), where branch_found
- is a bool specifying whether a branch name was found for
- HEAD. (If branch_found is False, then branch_name is ''.)
- Note that currently we check out the remote branch rather than
- the local, so this command does not return the just-checked-out
- branch. See _git_current_remote_branch.
- """
- cmd = 'git -C {dirname} symbolic-ref --short -q HEAD'.format(
- dirname=dirname).split()
- status, git_output = execute_subprocess(cmd,
- output_to_caller=True,
- status_to_caller=True)
- branch_found = not status
- if branch_found:
- git_output = git_output.strip()
- else:
- git_output = ''
- return branch_found, git_output
-
- @staticmethod
- def _git_current_tag(dirname):
- """Determines the name tag corresponding to HEAD (if any).
-
- if dirname is None, uses the cwd.
-
- Returns a tuple, (tag_found, tag_name), where tag_found is a
- bool specifying whether we found a tag name corresponding to
- HEAD. (If tag_found is False, then tag_name is ''.)
- """
- cmd = 'git -C {dirname} describe --exact-match --tags HEAD'.format(
- dirname=dirname).split()
- status, git_output = execute_subprocess(cmd,
- output_to_caller=True,
- status_to_caller=True)
- tag_found = not status
- if tag_found:
- git_output = git_output.strip()
- else:
- git_output = ''
- return tag_found, git_output
-
- @staticmethod
- def _git_showref_tag(ref, dirname):
- """Run git show-ref check if the user supplied ref is a tag.
-
- could also use git rev-parse --quiet --verify tagname^{tag}
- """
- cmd = ('git -C {dirname} show-ref --quiet --verify refs/tags/{ref}'
- .format(dirname=dirname, ref=ref).split())
- status = execute_subprocess(cmd, status_to_caller=True)
- return status
-
- @staticmethod
- def _git_showref_branch(ref, dirname):
- """Run git show-ref check if the user supplied ref is a local or
- tracked remote branch.
-
- """
- cmd = ('git -C {dirname} show-ref --quiet --verify refs/heads/{ref}'
- .format(dirname=dirname, ref=ref).split())
- status = execute_subprocess(cmd, status_to_caller=True)
- return status
-
- @staticmethod
- def _git_lsremote_branch(ref, remote_name, dirname):
- """Run git ls-remote to check if the user supplied ref is a remote
- branch that is not being tracked
-
- """
- cmd = ('git -C {dirname} ls-remote --exit-code --heads '
- '{remote_name} {ref}').format(
- dirname=dirname, remote_name=remote_name, ref=ref).split()
- status, output = execute_subprocess(cmd, status_to_caller=True, output_to_caller=True)
- if not status and not f"refs/heads/{ref}" in output:
- # In this case the ref is contained in the branch name but is not the complete branch name
- return -1
- return status
-
- @staticmethod
- def _git_revparse_commit(ref, dirname):
- """Run git rev-parse to detect if a reference is a SHA, HEAD or other
- valid commit.
-
- """
- cmd = ('git -C {dirname} rev-parse --quiet --verify {ref}^{commit}'
- .format(dirname=dirname, ref=ref, commit='{commit}').split())
- status, git_output = execute_subprocess(cmd, status_to_caller=True,
- output_to_caller=True)
- git_output = git_output.strip()
- return status, git_output
-
- @staticmethod
- def _git_status_porcelain_v1z(dirname):
- """Run git status to obtain repository information.
-
- This is run with '--untracked-files=no' to ignore untracked files.
-
- This is the machine-portable format that is guaranteed not to
- change between git versions or *user configuration*.
-
- """
- cmd = ('git -C {dirname} status --untracked-files=no --porcelain -z'
- .format(dirname=dirname)).split()
- git_output = execute_subprocess(cmd, output_to_caller=True)
- return git_output
-
- @staticmethod
- def _git_status_verbose(dirname):
- """Run the git status command to obtain repository information.
- """
- cmd = 'git -C {dirname} status'.format(dirname=dirname).split()
- git_output = execute_subprocess(cmd, output_to_caller=True)
- return git_output
-
- @staticmethod
- def _git_remote_verbose(dirname):
- """Run the git remote command to obtain repository information.
-
- Returned string is of the form:
- myfork git@github.com:johnpaulalex/manage_externals_jp.git (fetch)
- myfork git@github.com:johnpaulalex/manage_externals_jp.git (push)
- """
- cmd = 'git -C {dirname} remote --verbose'.format(
- dirname=dirname).split()
- return execute_subprocess(cmd, output_to_caller=True)
-
- @staticmethod
- def has_submodules(repo_dir_path):
- """Return True iff the repository at has a
- '.gitmodules' file
- """
- fname = os.path.join(repo_dir_path,
- ExternalsDescription.GIT_SUBMODULES_FILENAME)
-
- return os.path.exists(fname)
-
- # ----------------------------------------------------------------
- #
- # system call to git for side effects modifying the working tree
- #
- # ----------------------------------------------------------------
- @staticmethod
- def _git_clone(url, repo_dir_name, verbosity):
- """Clones url into repo_dir_name.
- """
- cmd = 'git clone --quiet {url} {repo_dir_name}'.format(
- url=url, repo_dir_name=repo_dir_name).split()
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
-
- @staticmethod
- def _git_remote_add(name, url, dirname):
- """Run the git remote command for the side effect of adding a remote
- """
- cmd = 'git -C {dirname} remote add {name} {url}'.format(
- dirname=dirname, name=name, url=url).split()
- execute_subprocess(cmd)
-
- @staticmethod
- def _git_fetch(remote_name, dirname):
- """Run the git fetch command for the side effect of updating the repo
- """
- cmd = 'git -C {dirname} fetch --quiet --tags {remote_name}'.format(
- dirname=dirname, remote_name=remote_name).split()
- execute_subprocess(cmd)
-
- @staticmethod
- def _git_checkout_ref(ref, verbosity, submodules, dirname):
- """Run the git checkout command for the side effect of updating the repo
-
- Param: ref is a reference to a local or remote object in the
- form 'origin/my_feature', or 'tag1'.
-
- """
- cmd = 'git -C {dirname} checkout --quiet {ref}'.format(
- dirname=dirname, ref=ref).split()
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
- if submodules:
- GitRepository._git_update_submodules(verbosity, dirname)
-
- @staticmethod
- def _git_sparse_checkout(verbosity, dirname):
- """Configure repo via read-tree."""
- cmd = 'git -C {dirname} config core.sparsecheckout true'.format(
- dirname=dirname).split()
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
- cmd = 'git -C {dirname} read-tree -mu HEAD'.format(
- dirname=dirname).split()
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
-
- @staticmethod
- def _git_update_submodules(verbosity, dirname):
- """Run git submodule update for the side effect of updating this
- repo's submodules.
- """
- # due to https://vielmetti.typepad.com/logbook/2022/10/git-security-fixes-lead-to-fatal-transport-file-not-allowed-error-in-ci-systems-cve-2022-39253.html
- # submodules from file doesn't work without overriding the protocol, this is done
- # for testing submodule support but should not be done in practice
- file_protocol = ""
- if 'unittest' in sys.modules.keys():
- file_protocol = "-c protocol.file.allow=always"
-
- # First, verify that we have a .gitmodules file
- if os.path.exists(
- os.path.join(dirname,
- ExternalsDescription.GIT_SUBMODULES_FILENAME)):
- cmd = ('git {file_protocol} -C {dirname} submodule update --init --recursive'
- .format(file_protocol=file_protocol, dirname=dirname)).split()
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
-
- execute_subprocess(cmd)
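
The workhorse of the sync logic above is '{ref}^{commit}': git rev-parse
resolves any spelling of a reference (tag, branch, hash prefix, HEAD) to
its underlying commit hash, so refs can be compared hash-to-hash and a
hash prefix can be verified with startswith(). A standalone sketch of
that one call (assumes git is installed, '.' is a repository, and
Python 3.7+ for the subprocess.run options):

    import subprocess

    def revparse_commit(ref, dirname):
        # Same (status, output) shape as _git_revparse_commit above.
        proc = subprocess.run(
            ['git', '-C', dirname, 'rev-parse', '--quiet', '--verify',
             '{0}^{{commit}}'.format(ref)],
            capture_output=True, text=True)
        return proc.returncode, proc.stdout.strip()

    status, commit_hash = revparse_commit('HEAD', '.')
    if status == 0:
        print('HEAD resolves to', commit_hash)
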
diff --git a/manage_externals/manic/repository_svn.py b/manage_externals/manic/repository_svn.py
deleted file mode 100644
index 32a71184b4..0000000000
--- a/manage_externals/manic/repository_svn.py
+++ /dev/null
@@ -1,291 +0,0 @@
-"""Class for interacting with svn repositories
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import re
-import xml.etree.ElementTree as ET
-
-from .global_constants import EMPTY_STR, VERBOSITY_VERBOSE
-from .repository import Repository
-from .externals_status import ExternalStatus
-from .utils import fatal_error, indent_string, printlog
-from .utils import execute_subprocess
-
-
-class SvnRepository(Repository):
- """
- Class to represent and operate on a repository description.
-
- For testing purposes, all system calls to svn should:
-
- * be isolated in separate functions with no application logic
- * of the form:
- - cmd = ['svn', ...]
- - value = execute_subprocess(cmd, output_to_caller={T|F},
- status_to_caller={T|F})
- - return value
- * be static methods (not rely on self)
- * be named _svn_subcommand_args(user_args)
-
- This convention allows easy unit testing of the repository logic
- by mocking the specific calls to return predefined results.
-
- """
- RE_URLLINE = re.compile(r'^URL:')
-
- def __init__(self, component_name, repo, ignore_ancestry=False):
- """
- Parse repo (an XML element).
- """
- Repository.__init__(self, component_name, repo)
- if 'github.com' in self._url:
- msg = "SVN access to github.com is no longer supported"
- fatal_error(msg)
- self._ignore_ancestry = ignore_ancestry
- if self._url.endswith('/'):
- # there is already a '/' separator in the URL; no need to add another
- url_sep = ''
- else:
- url_sep = '/'
- if self._branch:
- self._url = self._url + url_sep + self._branch
- elif self._tag:
- self._url = self._url + url_sep + self._tag
- else:
- msg = "DEV_ERROR in svn repository. Shouldn't be here!"
- fatal_error(msg)
-
- # ----------------------------------------------------------------
- #
- # Public API, defined by Repository
- #
- # ----------------------------------------------------------------
- def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): # pylint: disable=unused-argument
- """Checkout or update the working copy
-
- If the repo destination directory exists, switch the sandbox to
- match the externals description.
-
- If the repo destination directory does not exist, checkout the
- correct branch or tag.
- NB: 'recursive' is included as an argument for compatibility with
- git functionality (repository_git.py)
-
- """
- repo_dir_path = os.path.join(base_dir_path, repo_dir_name)
- if os.path.exists(repo_dir_path):
- cwd = os.getcwd()
- os.chdir(repo_dir_path)
- self._svn_switch(self._url, self._ignore_ancestry, verbosity)
- # svn switch can lead to a conflict state, but it gives a
- # return code of 0. So now we need to make sure that we're
- # in a clean (non-conflict) state.
- self._abort_if_dirty(repo_dir_path,
- "Expected clean state following switch")
- os.chdir(cwd)
- else:
- self._svn_checkout(self._url, repo_dir_path, verbosity)
-
- def status(self, stat, repo_dir_path):
- """
- Check and report the status of the repository
- """
- self._check_sync(stat, repo_dir_path)
- if os.path.exists(repo_dir_path):
- self._status_summary(stat, repo_dir_path)
-
- # ----------------------------------------------------------------
- #
- # Internal work functions
- #
- # ----------------------------------------------------------------
- def _check_sync(self, stat, repo_dir_path):
- """Check to see if repository directory exists and is at the expected
- url. Return: status object
-
- """
- if not os.path.exists(repo_dir_path):
- # NOTE(bja, 2017-10) this state should have been handled by
- # the source object and we never get here!
- stat.sync_state = ExternalStatus.STATUS_ERROR
- else:
- svn_output = self._svn_info(repo_dir_path)
- if not svn_output:
- # directory exists, but info returned nothing. .svn
- # directory removed or incomplete checkout?
- stat.sync_state = ExternalStatus.UNKNOWN
- else:
- stat.sync_state, stat.current_version = \
- self._check_url(svn_output, self._url)
- stat.expected_version = '/'.join(self._url.split('/')[3:])
-
- def _abort_if_dirty(self, repo_dir_path, message):
- """Check if the repo is in a dirty state; if so, abort with a
- helpful message.
-
- """
-
- stat = ExternalStatus()
- self._status_summary(stat, repo_dir_path)
- if stat.clean_state != ExternalStatus.STATUS_OK:
- status = self._svn_status_verbose(repo_dir_path)
- status = indent_string(status, 4)
- errmsg = """In directory
- {cwd}
-
-svn status now shows:
-{status}
-
-ERROR: {message}
-
-One possible cause of this problem is that there may have been untracked
-files in your working directory that had the same name as tracked files
-in the new revision.
-
-To recover: Clean up the above directory (resolving conflicts, etc.),
-then rerun checkout_externals.
-""".format(cwd=repo_dir_path, message=message, status=status)
-
- fatal_error(errmsg)
-
- @staticmethod
- def _check_url(svn_output, expected_url):
- """Determine the svn url from svn info output and return whether it
- matches the expected value.
-
- """
- url = None
- for line in svn_output.splitlines():
- if SvnRepository.RE_URLLINE.match(line):
- url = line.split(': ')[1].strip()
- break
- if not url:
- status = ExternalStatus.UNKNOWN
- elif url == expected_url:
- status = ExternalStatus.STATUS_OK
- else:
- status = ExternalStatus.MODEL_MODIFIED
-
- if url:
- current_version = '/'.join(url.split('/')[3:])
- else:
- current_version = EMPTY_STR
-
- return status, current_version
-
- def _status_summary(self, stat, repo_dir_path):
- """Report whether the svn repository is in-sync with the model
- description and whether the sandbox is clean or dirty.
-
- """
- svn_output = self._svn_status_xml(repo_dir_path)
- is_dirty = self.xml_status_is_dirty(svn_output)
- if is_dirty:
- stat.clean_state = ExternalStatus.DIRTY
- else:
- stat.clean_state = ExternalStatus.STATUS_OK
-
- # Now save the verbose status output in case the user wants to
- # see it.
- stat.status_output = self._svn_status_verbose(repo_dir_path)
-
- @staticmethod
- def xml_status_is_dirty(svn_output):
- """Parse svn status xml output and determine if the working copy is
- clean or dirty. Dirty is defined as:
-
- * modified files
- * added files
- * deleted files
- * missing files
-
- Unversioned files do not affect the clean/dirty status.
-
- 'external' is also an acceptable state
-
- """
- # pylint: disable=invalid-name
- SVN_EXTERNAL = 'external'
- SVN_UNVERSIONED = 'unversioned'
- # pylint: enable=invalid-name
-
- is_dirty = False
- try:
- xml_status = ET.fromstring(svn_output)
- except BaseException:
- fatal_error(
- "SVN returned invalid XML message {}".format(svn_output))
- xml_target = xml_status.find('./target')
- entries = xml_target.findall('./entry')
- for entry in entries:
- status = entry.find('./wc-status')
- item = status.get('item')
- if item == SVN_EXTERNAL:
- continue
- if item == SVN_UNVERSIONED:
- continue
- is_dirty = True
- break
- return is_dirty
-
- # ----------------------------------------------------------------
- #
- # system call to svn for information gathering
- #
- # ----------------------------------------------------------------
- @staticmethod
- def _svn_info(repo_dir_path):
- """Return results of svn info command
- """
- cmd = ['svn', 'info', repo_dir_path]
- output = execute_subprocess(cmd, output_to_caller=True)
- return output
-
- @staticmethod
- def _svn_status_verbose(repo_dir_path):
- """capture the full svn status output
- """
- cmd = ['svn', 'status', repo_dir_path]
- svn_output = execute_subprocess(cmd, output_to_caller=True)
- return svn_output
-
- @staticmethod
- def _svn_status_xml(repo_dir_path):
- """
- Get status of the subversion sandbox in repo_dir_path.
- """
- cmd = ['svn', 'status', '--xml', repo_dir_path]
- svn_output = execute_subprocess(cmd, output_to_caller=True)
- return svn_output
-
- # ----------------------------------------------------------------
- #
- # system call to svn for side effects modifying the working tree
- #
- # ----------------------------------------------------------------
- @staticmethod
- def _svn_checkout(url, repo_dir_path, verbosity):
- """
- Checkout a subversion repository (url) into repo_dir_path.
- """
- cmd = ['svn', 'checkout', '--quiet', url, repo_dir_path]
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
-
- @staticmethod
- def _svn_switch(url, ignore_ancestry, verbosity):
- """
- Switch branches in an svn sandbox.
- """
- cmd = ['svn', 'switch', '--quiet']
- if ignore_ancestry:
- cmd.append('--ignore-ancestry')
- cmd.append(url)
- if verbosity >= VERBOSITY_VERBOSE:
- printlog(' {0}'.format(' '.join(cmd)))
- execute_subprocess(cmd)
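
A small illustration of the dirty-state rule above: 'unversioned' and
'external' entries are skipped, so a single 'modified' entry marks the
sandbox dirty (the XML payload is hand-written to match the structure
the parser expects, not real svn output):

    from manic.repository_svn import SvnRepository

    SVN_STATUS_XML = """<?xml version="1.0"?>
    <status>
      <target path=".">
        <entry path="src/main.F90">
          <wc-status item="modified" revision="100"/>
        </entry>
        <entry path="notes.txt">
          <wc-status item="unversioned"/>
        </entry>
      </target>
    </status>"""

    print(SvnRepository.xml_status_is_dirty(SVN_STATUS_XML))  # True
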
diff --git a/manage_externals/manic/sourcetree.py b/manage_externals/manic/sourcetree.py
deleted file mode 100644
index cf2a5b7569..0000000000
--- a/manage_externals/manic/sourcetree.py
+++ /dev/null
@@ -1,425 +0,0 @@
-"""
-Classes to represent an externals config file (SourceTree) and the components
-within it (_External).
-"""
-
-import errno
-import logging
-import os
-
-from .externals_description import ExternalsDescription
-from .externals_description import read_externals_description_file
-from .externals_description import create_externals_description
-from .repository_factory import create_repository
-from .repository_git import GitRepository
-from .externals_status import ExternalStatus
-from .utils import fatal_error, printlog
-from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR
-from .global_constants import VERBOSITY_VERBOSE
-
-class _External(object):
- """
- A single component hosted in an external repository (and any children).
-
- The component may or may not be checked-out upon construction.
- """
- # pylint: disable=R0902
-
- def __init__(self, root_dir, name, local_path, required, subexternals_path,
- repo, svn_ignore_ancestry, subexternal_sourcetree):
- """Create a single external component (checked out or not).
-
- Input:
- root_dir : string - the (checked-out) parent repo's root dir.
- local_path : string - this external's (checked-out) subdir relative
- to root_dir, e.g. "components/mom"
- repo: Repository - the repo object for this external. Can be None (e.g. if this external just refers to another external file).
-
- name : string - name of this external (as named by the parent
- reference). May or may not correspond to something in the path.
-
- ext_description : dict - source ExternalsDescription object
-
- svn_ignore_ancestry : bool - use --ignore-externals with svn switch
-
- subexternals_path: string - path to sub-externals config file, if any. Relative to local_path, or special value 'none'.
- subexternal_sourcetree: SourceTree - corresponding to subexternals_path, if subexternals_path exists (it might not, if it is not checked out yet).
- """
- self._name = name
- self._required = required
-
- self._stat = None # Populated in status()
-
- self._local_path = local_path
- # _repo_dir_path : full repository directory, e.g.
- # "/components/mom"
- repo_dir = os.path.join(root_dir, local_path)
- self._repo_dir_path = os.path.abspath(repo_dir)
- # _base_dir_path : base directory *containing* the repository, e.g.
- # "/components"
- self._base_dir_path = os.path.dirname(self._repo_dir_path)
- # _repo_dir_name : base_dir_path + repo_dir_name = repo_dir_path
- # e.g., "mom"
- self._repo_dir_name = os.path.basename(self._repo_dir_path)
- self._repo = repo
-
- # Does this component have subcomponents aka an externals config?
- self._subexternals_path = subexternals_path
- self._subexternal_sourcetree = subexternal_sourcetree
-
-
- def get_name(self):
- """
- Return the external object's name
- """
- return self._name
-
- def get_local_path(self):
- """
- Return the external object's path
- """
- return self._local_path
-
- def get_repo_dir_path(self):
- return self._repo_dir_path
-
- def get_subexternals_path(self):
- return self._subexternals_path
-
- def get_repo(self):
- return self._repo
-
- def status(self, force=False, print_progress=False):
- """
- Returns status of this component and all subcomponents.
-
- Returns a dict mapping our local path (not component name!) to an
- ExternalStatus dict. Any subcomponents will have their own top-level
- path keys. Note the return value includes entries for this and all
- subcomponents regardless of whether they are locally installed or not.
-
- Side-effect: If self._stat is empty or force is True, calculates _stat.
- """
- calc_stat = force or not self._stat
-
- if calc_stat:
- self._stat = ExternalStatus()
- self._stat.path = self.get_local_path()
- if not self._required:
- self._stat.source_type = ExternalStatus.OPTIONAL
- elif self._local_path == LOCAL_PATH_INDICATOR:
- # LOCAL_PATH_INDICATOR, '.' paths, are standalone
- # component directories that are not managed by
- # checkout_subexternals.
- self._stat.source_type = ExternalStatus.STANDALONE
- else:
- # managed by checkout_subexternals
- self._stat.source_type = ExternalStatus.MANAGED
-
- subcomponent_stats = {}
- if not os.path.exists(self._repo_dir_path):
- if calc_stat:
- # No local repository.
- self._stat.sync_state = ExternalStatus.EMPTY
- msg = ('status check: repository directory for "{0}" does not '
- 'exist.'.format(self._name))
- logging.info(msg)
- self._stat.current_version = 'not checked out'
- # NOTE(bja, 2018-01) directory doesn't exist, so we cannot
- # use repo to determine the expected version. We just take
- # a best-guess based on the assumption that only tag or
- # branch should be set, but not both.
- if not self._repo:
- self._stat.expected_version = 'unknown'
- else:
- self._stat.expected_version = self._repo.tag() + self._repo.branch()
- else:
- # Merge local repository state (e.g. clean/dirty) into self._stat.
- if calc_stat and self._repo:
- self._repo.status(self._stat, self._repo_dir_path)
-
- # Status of subcomponents, if any.
- if self._subexternals_path and self._subexternal_sourcetree:
- cwd = os.getcwd()
- # SourceTree.status() expects to be called from the correct
- # root directory.
- os.chdir(self._repo_dir_path)
- subcomponent_stats = self._subexternal_sourcetree.status(self._local_path, force=force, print_progress=print_progress)
- os.chdir(cwd)
-
- # Merge our status + subcomponent statuses into one return dict keyed
- # by component path.
- all_stats = {}
- # don't add the root component because we don't manage it
- # and can't provide useful info about it.
- if self._local_path != LOCAL_PATH_INDICATOR:
- # store the stats under the local_path, not comp name so
- # it will be sorted correctly
- all_stats[self._stat.path] = self._stat
-
- if subcomponent_stats:
- all_stats.update(subcomponent_stats)
-
- return all_stats
-
- def checkout(self, verbosity):
- """
- If the repo destination directory exists, ensure it is correct (from
- correct URL, correct branch or tag), and possibly update it.
- If the repo destination directory does not exist, checkout the correct
- branch or tag.
- Does not check out sub-externals; see SourceTree.checkout().
- """
- # Make sure we are in the correct location
- if not os.path.exists(self._repo_dir_path):
- # repository directory doesn't exist. Need to check it
- # out, and for that we need the base_dir_path to exist
- try:
- os.makedirs(self._base_dir_path)
- except OSError as error:
- if error.errno != errno.EEXIST:
- msg = 'Could not create directory "{0}"'.format(
- self._base_dir_path)
- fatal_error(msg)
-
- if not self._stat:
- self.status()
- assert self._stat
-
- if self._stat.source_type != ExternalStatus.STANDALONE:
- if verbosity >= VERBOSITY_VERBOSE:
- # NOTE(bja, 2018-01) probably do not want to pass
- # verbosity in this case, because if (verbosity ==
- # VERBOSITY_DUMP), then the previous status output would
- # also be dumped, adding noise to the output.
- self._stat.log_status_message(VERBOSITY_VERBOSE)
-
- if self._repo:
- if self._stat.sync_state == ExternalStatus.STATUS_OK:
- # If we're already in sync, avoid showing verbose output
- # from the checkout command, unless the verbosity level
- # is 2 or more.
- checkout_verbosity = verbosity - 1
- else:
- checkout_verbosity = verbosity
-
- self._repo.checkout(self._base_dir_path, self._repo_dir_name,
- checkout_verbosity, self.clone_recursive())
-
- def replace_subexternal_sourcetree(self, sourcetree):
- self._subexternal_sourcetree = sourcetree
-
- def clone_recursive(self):
- 'Return True iff any .gitmodules files should be processed'
- # Try recursive .gitmodules unless there is an externals entry
- recursive = not self._subexternals_path
-
- return recursive
-
-
-class SourceTree(object):
- """
- SourceTree represents a group of managed externals.
-
- Those externals may not be checked out locally yet; they might only
- have Repository objects pointing to their respective repositories.
- """
-
- @classmethod
- def from_externals_file(cls, parent_repo_dir_path, parent_repo,
- externals_path):
- """Creates a SourceTree representing the given externals file.
-
- Looks up a git submodules file as an optional backup if there is no
- externals file specified.
-
- Returns None if there is no externals file (i.e. it's None or 'none'),
- or if the externals file hasn't been checked out yet.
-
- parent_repo_dir_path: parent repo root dir
- parent_repo: parent repo.
- externals_path: path to externals file, relative to parent_repo_dir_path.
- """
- if not os.path.exists(parent_repo_dir_path):
- # NOTE(bja, 2017-10) repository has not been checked out
- # yet, can't process the externals file. Assume we are
- # checking status before code is checked out and this
- # will be handled correctly later.
- return None
-
- if externals_path.lower() == 'none':
- # With explicit 'none', do not look for git submodules file.
- return None
-
- cwd = os.getcwd()
- os.chdir(parent_repo_dir_path)
-
- if not externals_path:
- if GitRepository.has_submodules(parent_repo_dir_path):
- externals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME
- else:
- return None
-
- if not os.path.exists(externals_path):
- # NOTE(bja, 2017-10) this check is redundant with the one
- # in read_externals_description_file!
- msg = ('Externals description file "{0}" '
- 'does not exist! In directory: {1}'.format(
- externals_path, parent_repo_dir_path))
- fatal_error(msg)
-
- externals_root = parent_repo_dir_path
- # model_data is a dict-like object which mirrors the file format.
- model_data = read_externals_description_file(externals_root,
- externals_path)
- # ext_description is another dict-like object (see ExternalsDescription)
- ext_description = create_externals_description(model_data,
- parent_repo=parent_repo)
- externals_sourcetree = SourceTree(externals_root, ext_description)
- os.chdir(cwd)
- return externals_sourcetree
-
- def __init__(self, root_dir, ext_description, svn_ignore_ancestry=False):
- """
- Build a SourceTree object from an ExternalDescription.
-
- root_dir: the (checked-out) parent repo root dir.
- """
- self._root_dir = os.path.abspath(root_dir)
- self._all_components = {} # component_name -> _External
- self._required_compnames = []
- for comp, desc in ext_description.items():
- local_path = desc[ExternalsDescription.PATH]
- required = desc[ExternalsDescription.REQUIRED]
- repo_info = desc[ExternalsDescription.REPO]
- subexternals_path = desc[ExternalsDescription.EXTERNALS]
-
- repo = create_repository(comp,
- repo_info,
- svn_ignore_ancestry=svn_ignore_ancestry)
-
- sourcetree = None
- # Treat a .gitmodules file as a backup externals config
- if not subexternals_path:
- parent_repo_dir_path = os.path.abspath(os.path.join(root_dir,
- local_path))
- if GitRepository.has_submodules(parent_repo_dir_path):
- subexternals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME
-
- # Might return None (if the subexternal isn't checked out yet, or subexternal is None or 'none')
- subexternal_sourcetree = SourceTree.from_externals_file(
- os.path.join(self._root_dir, local_path),
- repo,
- subexternals_path)
- src = _External(self._root_dir, comp, local_path, required,
- subexternals_path, repo, svn_ignore_ancestry,
- subexternal_sourcetree)
-
- self._all_components[comp] = src
- if required:
- self._required_compnames.append(comp)
-
- def status(self, relative_path_base=LOCAL_PATH_INDICATOR,
- force=False, print_progress=False):
- """Return a dictionary of local path->ExternalStatus.
-
- Notes about the returned dictionary:
- * It is keyed by local path (e.g. 'components/mom'), not by
- component name (e.g. 'mom').
- * It contains top-level keys for all traversed components, whether
- discovered by recursion or top-level.
- * It contains entries for all components regardless of whether they
- are locally installed or not, or required or optional.
-        """
- load_comps = self._all_components.keys()
-
- summary = {} # Holds merged statuses from all components.
- for comp in load_comps:
- if print_progress:
- printlog('{0}, '.format(comp), end='')
- stat = self._all_components[comp].status(force=force,
- print_progress=print_progress)
-
- # Returned status dictionary is keyed by local path; prepend
- # relative_path_base if not already there.
- stat_final = {}
- for name in stat.keys():
- if stat[name].path.startswith(relative_path_base):
- stat_final[name] = stat[name]
- else:
- modified_path = os.path.join(relative_path_base,
- stat[name].path)
- stat_final[modified_path] = stat[name]
- stat_final[modified_path].path = modified_path
- summary.update(stat_final)
-
- return summary
-
- def _find_installed_optional_components(self):
- """Returns a list of installed optional component names, if any."""
- installed_comps = []
- for comp_name, ext in self._all_components.items():
- if comp_name in self._required_compnames:
- continue
- # Note that in practice we expect this status to be cached.
- path_to_stat = ext.status()
-
- # If any part of this component exists locally, consider it
- # installed and therefore eligible for updating.
- if any(s.sync_state != ExternalStatus.EMPTY
- for s in path_to_stat.values()):
- installed_comps.append(comp_name)
- return installed_comps
-
- def checkout(self, verbosity, load_all, load_comp=None):
- """
- Checkout or update indicated components into the configured subdirs.
-
- If load_all is True, checkout all externals (required + optional), recursively.
-        If load_all is False and load_comp is set, checkout load_comp (and any required subexternals, plus any optional subexternals that are already checked out, recursively).
- If load_all is False and load_comp is None, checkout all required externals, plus any optionals that are already checked out, recursively.
- """
- if load_all:
- tmp_comps = self._all_components.keys()
- elif load_comp is not None:
- tmp_comps = [load_comp]
- else:
- local_optional_compnames = self._find_installed_optional_components()
- tmp_comps = self._required_compnames + local_optional_compnames
- if local_optional_compnames:
- printlog('Found locally installed optional components: ' +
- ', '.join(local_optional_compnames))
- bad_compnames = set(local_optional_compnames) - set(self._all_components.keys())
- if bad_compnames:
- printlog('Internal error: found locally installed components that are not in the global list of all components: ' + ','.join(bad_compnames))
-
- if verbosity >= VERBOSITY_VERBOSE:
- printlog('Checking out externals: ')
- else:
- printlog('Checking out externals: ', end='')
-
- # Sort by path so that if paths are nested the
- # parent repo is checked out first.
- load_comps = sorted(tmp_comps, key=lambda comp: self._all_components[comp].get_local_path())
-
- # checkout.
- for comp_name in load_comps:
- if verbosity < VERBOSITY_VERBOSE:
- printlog('{0}, '.format(comp_name), end='')
- else:
- # verbose output handled by the _External object, just
- # output a newline
- printlog(EMPTY_STR)
- c = self._all_components[comp_name]
- # Does not recurse.
- c.checkout(verbosity)
- # Recursively check out subexternals, if any. Returns None
- # if there's no subexternals path.
- component_subexternal_sourcetree = SourceTree.from_externals_file(
- c.get_repo_dir_path(),
- c.get_repo(),
- c.get_subexternals_path())
- c.replace_subexternal_sourcetree(component_subexternal_sourcetree)
- if component_subexternal_sourcetree:
- component_subexternal_sourcetree.checkout(verbosity, load_all)
- printlog('')
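
The sort by local path above is what guarantees that a parent repository is
checked out before any external nested inside it. A minimal sketch of that
ordering, using hypothetical component names and paths rather than anything
from this repository:

```python
# Sorting components by their local path places a parent directory such as
# 'components/mom' before a nested path such as 'components/mom/externals',
# so parents are checked out first. (Hypothetical names; this mirrors the
# sorted(tmp_comps, key=lambda comp: ...get_local_path()) call above.)
components = {
    "mom_sub": "components/mom/externals",
    "mom": "components/mom",
    "cime": "cime",
}

load_comps = sorted(components, key=lambda name: components[name])
print(load_comps)  # ['cime', 'mom', 'mom_sub'] -> parent before child
```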
diff --git a/manage_externals/test/.coveragerc b/manage_externals/test/.coveragerc
deleted file mode 100644
index 8b681888b8..0000000000
--- a/manage_externals/test/.coveragerc
+++ /dev/null
@@ -1,7 +0,0 @@
-[run]
-branch = True
-omit = test_unit_*.py
- test_sys_*.py
- /usr/*
- .local/*
- */site-packages/*
\ No newline at end of file
diff --git a/manage_externals/test/.gitignore b/manage_externals/test/.gitignore
deleted file mode 100644
index dd5795998f..0000000000
--- a/manage_externals/test/.gitignore
+++ /dev/null
@@ -1,7 +0,0 @@
-# virtual environments
-env_python*
-
-# python code coverage tool output
-.coverage
-htmlcov
-
diff --git a/manage_externals/test/.pylint.rc b/manage_externals/test/.pylint.rc
deleted file mode 100644
index 64abd03e42..0000000000
--- a/manage_externals/test/.pylint.rc
+++ /dev/null
@@ -1,426 +0,0 @@
-[MASTER]
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code
-extension-pkg-whitelist=
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=.git,.svn,env2
-
-# Add files or directories matching the regex patterns to the blacklist. The
-# regex matches against base names, not paths.
-ignore-patterns=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Use multiple processes to speed up Pylint.
-jobs=1
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# Specify a configuration file.
-#rcfile=
-
-# Allow loading of arbitrary C extensions. Extensions are imported into the
-# active Python interpreter and may run arbitrary code.
-unsafe-load-any-extension=no
-
-
-[MESSAGES CONTROL]
-
-# Only show warnings with the listed confidence levels. Leave empty to show
-# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
-confidence=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once).You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use"--disable=all --enable=classes
-# --disable=W"
-disable=bad-continuation,useless-object-inheritance
-
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once). See also the "--disable" option for examples.
-enable=
-
-
-[REPORTS]
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details
-msg-template={msg_id}:{line:3d},{column:2d}: {msg} ({symbol})
-
-# Set the output format. Available formats are text, parseable, colorized, json
-# and msvs (visual studio).You can also give a reporter class, eg
-# mypackage.mymodule.MyReporterClass.
-output-format=text
-
-# Tells whether to display a full report or only the messages
-#reports=yes
-
-# Activate the evaluation score.
-score=yes
-
-
-[REFACTORING]
-
-# Maximum number of nested blocks for function / method body
-max-nested-blocks=5
-
-
-[BASIC]
-
-# Naming hint for argument names
-argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Regular expression matching correct argument names
-argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Naming hint for attribute names
-attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Regular expression matching correct attribute names
-attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Naming hint for class attribute names
-class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
-
-# Regular expression matching correct class attribute names
-class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
-
-# Naming hint for class names
-class-name-hint=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression matching correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Naming hint for constant names
-const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
-
-# Regular expression matching correct constant names
-const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-docstring-min-length=-1
-
-# Naming hint for function names
-function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Regular expression matching correct function names
-function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_
-
-# Include a hint for the correct naming format with invalid-name
-include-naming-hint=no
-
-# Naming hint for inline iteration names
-inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
-
-# Regular expression matching correct inline iteration names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Naming hint for method names
-method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Regular expression matching correct method names
-method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Naming hint for module names
-module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression matching correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Colon-delimited sets of names that determine each other's naming style when
-# the name regexes allow several styles.
-name-group=
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-no-docstring-rgx=^_
-
-# List of decorators that produce properties, such as abc.abstractproperty. Add
-# to this list to register other decorators that produce valid properties.
-property-classes=abc.abstractproperty
-
-# Naming hint for variable names
-variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-# Regular expression matching correct variable names
-variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-indent-string=' '
-
-# Maximum number of characters on a single line.
-max-line-length=100
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# List of optional constructs for which whitespace checking is disabled. `dict-
-# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
-# `trailing-comma` allows a space between comma and closing bracket: (a, ).
-# `empty-line` allows space-only lines.
-no-space-check=trailing-comma,dict-separator
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[LOGGING]
-
-# Logging modules to check that the string format arguments are in logging
-# function parameter format
-logging-modules=logging
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-[SIMILARITIES]
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-ignore-imports=no
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-
-[SPELLING]
-
-# Spelling dictionary name. Available dictionaries: none. To make it working
-# install python-enchant package.
-spelling-dict=
-
-# List of comma separated words that should not be checked.
-spelling-ignore-words=
-
-# A path to a file that contains private dictionary; one word per line.
-spelling-private-dict-file=
-
-# Tells whether to store unknown words to indicated private dictionary in
-# --spelling-private-dict-file option instead of raising a message.
-spelling-store-unknown-words=no
-
-
-[TYPECHECK]
-
-# List of decorators that produce context managers, such as
-# contextlib.contextmanager. Add to this list to register other decorators that
-# produce valid context managers.
-contextmanager-decorators=contextlib.contextmanager
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E1101 when accessed. Python regular
-# expressions are accepted.
-generated-members=
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# This flag controls whether pylint should warn about no-member and similar
-# checks whenever an opaque object is returned when inferring. The inference
-# can return multiple potential results while evaluating a Python object, but
-# some branches might not be evaluated, which results in partial inference. In
-# that case, it might be useful to still emit no-member and other checks for
-# the rest of the inferred objects.
-ignore-on-opaque-inference=yes
-
-# List of class names for which member attributes should not be checked (useful
-# for classes with dynamically set attributes). This supports the use of
-# qualified names.
-ignored-classes=optparse.Values,thread._local,_thread._local
-
-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis. It
-# supports qualified module names, as well as Unix pattern matching.
-ignored-modules=
-
-# Show a hint with possible names when a member name was not found. The aspect
-# of finding the hint is based on edit distance.
-missing-member-hint=yes
-
-# The minimum edit distance a name should have in order to be considered a
-# similar match for a missing member name.
-missing-member-hint-distance=1
-
-# The total number of similar names that should be taken in consideration when
-# showing a hint for a missing member.
-missing-member-max-choices=1
-
-
-[VARIABLES]
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-# Tells whether unused global variables should be treated as a violation.
-allow-global-unused-variables=yes
-
-# List of strings which can identify a callback function by name. A callback
-# name must start or end with one of those strings.
-callbacks=cb_,_cb
-
-# A regular expression matching the name of dummy variables (i.e. expectedly
-# not used).
-dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*|^ignored_|^unused_
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six.moves,future.builtins
-
-
-[CLASSES]
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of member names, which should be excluded from the protected access
-# warning.
-exclude-protected=_asdict,_fields,_replace,_source,_make
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=mcs
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in a if statement
-max-bool-expr=5
-
-# Maximum number of branch for function / method body
-max-branches=12
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-
-[IMPORTS]
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,TERMIOS,Bastion,rexec
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception
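
The naming rules above are ordinary regular expressions, so they are easy to
try out in isolation. A small standalone check of the argument/variable
pattern from this config (the sample names are illustrative, not taken from
the code base):

```python
import re

# Pattern from argument-rgx/variable-rgx above: 3-30 character
# lowercase snake_case, or any name starting with an underscore.
NAME_RGX = re.compile(r"(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$")

for name in ("root_dir", "_external", "x", "BadName"):
    ok = NAME_RGX.match(name) is not None
    print(name, "->", "ok" if ok else "invalid-name")
# 'x' is too short and fails unless listed in good-names;
# 'BadName' fails because it is not lowercase snake_case.
```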
diff --git a/manage_externals/test/Makefile b/manage_externals/test/Makefile
deleted file mode 100644
index 293e360757..0000000000
--- a/manage_externals/test/Makefile
+++ /dev/null
@@ -1,124 +0,0 @@
-python = not-set
-verbose = not-set
-debug = not-set
-
-ifneq ($(python), not-set)
-PYTHON=$(python)
-else
-PYTHON=python
-endif
-
-# we need the python path to point one level up to access the package
-# and executables
-PYPATH=PYTHONPATH=..:
-
-# common args for running tests
-TEST_ARGS=-m unittest discover
-
-ifeq ($(debug), not-set)
- ifeq ($(verbose), not-set)
- # summary only output
- TEST_ARGS+=--buffer
- else
- # show individual test summary
- TEST_ARGS+=--buffer --verbose
- endif
-else
- # show detailed test output
- TEST_ARGS+=--verbose
-endif
-
-
-# auto reformat the code
-AUTOPEP8=autopep8
-AUTOPEP8_ARGS=--aggressive --in-place
-
-# run lint
-PYLINT=pylint
-PYLINT_ARGS=-j 2 --rcfile=.pylint.rc
-
-# code coverage
-COVERAGE=coverage
-COVERAGE_ARGS=--rcfile=.coveragerc
-
-# source files
-SRC = \
- ../checkout_externals \
- ../manic/*.py
-
-CHECKOUT_EXE = ../checkout_externals
-
-TEST_DIR = .
-
-README = ../README.md
-
-#
-# testing
-#
-.PHONY : utest
-utest : FORCE
- $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_unit_*.py'
-
-.PHONY : stest
-stest : FORCE
- $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_sys_*.py'
-
-.PHONY : test
-test : utest stest
-
-#
-# documentation
-#
-.PHONY : readme
-readme : $(CHECKOUT_EXE)
- printf "%s\n\n" "-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --" > $(README)
- printf "%s" '[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)' >> $(README)
- printf "%s" '[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master)' >> $(README)
- printf "\n%s\n" '```' >> $(README)
- $(CHECKOUT_EXE) --help >> $(README)
-
-#
-# coding standards
-#
-.PHONY : style
-style : FORCE
- $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py
-
-.PHONY : lint
-lint : FORCE
- $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py
-
-.PHONY : stylint
-stylint : style lint
-
-.PHONY : coverage
-# Need to use a single coverage run with a single pattern rather than
-# using two separate commands with separate patterns for test_unit_*.py
-# and test_sys_*.py: The latter clobbers some results from the first
-# run, even if we use the --append flag to 'coverage run'.
-coverage : FORCE
- $(PYPATH) $(COVERAGE) erase
- $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_*.py'
- $(PYPATH) $(COVERAGE) html
-
-#
-# virtual environment creation
-#
-.PHONY : env
-env : FORCE
- $(PYPATH) virtualenv --python $(PYTHON) $@_$(PYTHON)
- . $@_$(PYTHON)/bin/activate; pip install -r requirements.txt
-
-#
-# utilities
-#
-.PHONY : clean
-clean : FORCE
- -rm -rf *~ *.pyc tmp fake htmlcov
-
-.PHONY : clobber
-clobber : clean
- -rm -rf env_*
-
-FORCE :
-
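
The `utest` and `stest` rules above are thin wrappers around unittest
discovery with `PYTHONPATH` pointing one level up. A rough Python equivalent
of `make utest`, assuming it is run from the test directory:

```python
import sys
import unittest

# Equivalent of PYPATH=PYTHONPATH=..: the package lives one level up.
sys.path.insert(0, "..")

# Equivalent of: python -m unittest discover --buffer --pattern 'test_unit_*.py'
suite = unittest.defaultTestLoader.discover(".", pattern="test_unit_*.py")
result = unittest.TextTestRunner(buffer=True).run(suite)
sys.exit(0 if result.wasSuccessful() else 1)
```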
diff --git a/manage_externals/test/README.md b/manage_externals/test/README.md
deleted file mode 100644
index 1e8f2eaa77..0000000000
--- a/manage_externals/test/README.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# Testing for checkout_externals
-
-## Unit tests
-
-```SH
- cd checkout_externals/test
- make utest
-```
-
-## System tests
-
-```SH
- cd checkout_externals/test
- make stest
-```
-
-Example to run a single test:
-```SH
- cd checkout_externals
- python -m unittest test.test_sys_checkout.TestSysCheckout.test_container_simple_required
-```
-
-## Static analysis
-
-checkout_externals is difficult to test thoroughly because it relies
-on git and svn, and svn requires a live network connection and
-repository. Static analysis helps catch bugs in code paths that are
-not exercised by the tests, but it requires conforming to community
-standards and best practices. autopep8 and pylint should be run
-regularly for automatic code formatting and linting.
-
-```SH
- cd checkout_externals/test
- make lint
-```
-
-The canonical formatting for the code is whatever autopep8
-generates. All issues identified by pylint should be addressed.
-
-
-## Code coverage
-
-All changes to the code should include maintaining existing tests and
-writing new tests for new or changed functionality. To ensure test
-coverage, run the code coverage tool:
-
-```SH
- cd checkout_externals/test
- make coverage
- open -a Firefox.app htmlcov/index.html
-```
-
-
diff --git a/manage_externals/test/doc/.gitignore b/manage_externals/test/doc/.gitignore
deleted file mode 100644
index d4e11e5ea0..0000000000
--- a/manage_externals/test/doc/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-_build
-
diff --git a/manage_externals/test/doc/conf.py b/manage_externals/test/doc/conf.py
deleted file mode 100644
index 469c0b0dc5..0000000000
--- a/manage_externals/test/doc/conf.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Manage Externals documentation build configuration file, created by
-# sphinx-quickstart on Wed Nov 29 10:53:25 2017.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
-
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = ['sphinx.ext.autodoc',
- 'sphinx.ext.todo',
- 'sphinx.ext.coverage',
- 'sphinx.ext.viewcode',
- 'sphinx.ext.githubpages']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'Manage Externals'
-copyright = u'2017, CSEG at NCAR'
-author = u'CSEG at NCAR'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = u'1.0.0'
-# The full version, including alpha/beta/rc tags.
-release = u'1.0.0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = 'alabaster'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#
-# html_theme_options = {}
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# Custom sidebar templates, must be a dictionary that maps document names
-# to template names.
-#
-# This is required for the alabaster theme
-# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
-html_sidebars = {
- '**': [
- 'relations.html', # needs 'show_related': True theme option to display
- 'searchbox.html',
- ]
-}
-
-
-# -- Options for HTMLHelp output ------------------------------------------
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'ManageExternalsdoc'
-
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'ManageExternals.tex', u'Manage Externals Documentation',
- u'CSEG at NCAR', 'manual'),
-]
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'manageexternals', u'Manage Externals Documentation',
- [author], 1)
-]
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (master_doc, 'ManageExternals', u'Manage Externals Documentation',
- author, 'ManageExternals', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-
-
diff --git a/manage_externals/test/doc/develop.rst b/manage_externals/test/doc/develop.rst
deleted file mode 100644
index b817b7b093..0000000000
--- a/manage_externals/test/doc/develop.rst
+++ /dev/null
@@ -1,202 +0,0 @@
-Developer Guidelines
-====================
-
-The manage externals utilities are a lightweight replacement for svn
-externals that will work with git repositories pulling in a mixture of
-git and svn dependencies.
-
-Given an externals description and a working copy:
-
-* *checkout_externals* attempts to make the working copy agree with the
- externals description
-
-* *generate_externals* attempts to make the externals description agree
- with the working copy.
-
-For these operations, the utilities should:
-
-* operate consistently across git and svn
-
-* operate simply with minimal user complexity
-
-* operate robustly across a wide range of repository states
-
-* provide explicit error messages when a problem occurs
-
-* leave the working copy in a valid state
-
-The utilities in manage externals are **NOT** generic wrappers around
-revision control operations or a replacement for common tasks. Users
-are expected to:
-
-* create branches prior to starting development
-
-* add remotes and push changes
-
-* create tags
-
-* delete branches
-
-These types of tasks are often highly workflow dependent (e.g. branch
-naming conventions may vary between repositories), have the potential
-to destroy user data, introduce significant code complexity and 'edge
-cases' that are extremely difficult to detect and test, and often
-require subtle decision making, especially if a problem occurs.
-
-Users who want to automate these types of tasks are encouraged to create their
-own tools. The externals description files are explicitly versioned
-and the internal APIs are intended to be stable for these purposes.
-
-Core Design Principles
------------------------
-
-1. Users can, and are actively encouraged to, modify the externals
- directories using revision control outside of manage_externals
- tools. You can't make any assumptions about the state of the
- working copy. Examples: adding a remote, creating a branch,
- switching to a branch, deleting the directory entirely.
-
-2. Given that the user can do anything, the manage externals library
-   cannot preserve state between calls. The only information it can
-   rely on is what it expects based on the content of the externals
-   description file, and what the actual state of the directory tree
-   is.
-
-3. Do *not* do anything that will possibly destroy user data!
-
- a. Do not remove files from the file system. We are operating on
- user supplied input. If you don't call 'rm', you can't
- accidentally remove the user's data. Thinking of calling
- ``shutil.rmtree(user_input)``? What if the user accidentally
- specified user_input such that it resolves to their home
- directory.... Yeah. Don't go there.
-
- b. Rely on git and svn to do their job as much as possible. Don't
- duplicate functionality. Examples:
-
- i. We require the working copies to be 'clean' as reported by
- ``git status`` and ``svn status``. What if there are misc
- editor files floating around that prevent an update? Use the
- git and svn ignore functionality so they are not
- reported. Don't try to remove them from manage_externals or
- determine if they are 'safe' to ignore.
-
- ii. Do not use '--force'. Ever. This is a sign you are doing
- something dangerous, it may not be what the user
- wants. Remember, they are encouraged to modify their repo.
-
-4. There are often multiple ways to obtain a particular piece of
- information from git. Scraping screen output is brittle and
- generally not considered a stable API across different versions of
- git. Given a choice between:
-
-   a. a lower level git 'plumbing' command that processes a
-      specific request and returns a success/failure status.
-
-   b. a high level git command that produces a bunch of output
-      that must be processed.
-
- We always prefer the former. It almost always involves
- writing and maintaining less code and is more likely to be
- stable.
-
-5. Backward compatibility is critical. We have *nested*
-   repositories. It is trivially easy to change their versions. They may
- have very different versions of the top level manage_externals. The
- ability to read and work with old model description files is
- critical to avoid problems for users. We also have automated tools
- (testdb) that must generate and read external description
- files. Backward compatibility will make staging changes vastly
- simpler.
-
-Model Users
------------
-
-Consider the needs of the following model users when developing manage_externals:
-
-* Users who will check out the code once, and never change versions.
-
-* Users who will check out the code once, then work for several years
-  without updating before trying to update or request integration.
-
-* Users who develop code but do not use revision control beyond the
-  initial checkout. If they have modified or untracked files in the
-  repo, those files may be irreplaceable. Don't destroy user data.
-
-* Intermediate users who are working with multiple repos or branches
- on a regular basis. They may only use manage_externals weekly or
- monthly. Keep the user interface and documentation simple and
- explicit. The more command line options they have to remember or
-  look up, the more frustrated they get.
-
-* Software engineers who use the tools multiple times a day. It should
- get out of their way.
-
-User Interface
---------------
-
-Basic operation for the most standard use cases should be kept as
-simple as possible. Many users will only rarely run the manage
-utilities. Even advanced users don't like reading a lot of help
-documentation or struggling to remember commands and piece together
-what they need to run. Having many command line options, even if not
-needed, is extremely frustrating and overwhelming for most users. A few
-simple, explicitly named commands are better than a single command
-with many options.
-
-How will users get help if something goes wrong? This is a custom,
-one-off solution. Searching the internet for manage_externals will
-only return the user doc for this project at best. There isn't likely
-to be a stackoverflow question or blog post where someone else already
-answered a user's question. And very few people outside this community
-will be able to provide help if something goes wrong. The sooner we
-kick users out of these utilities and into standard version control
-tools, the better off they are going to be if they run into a problem.
-
-Repositories
-------------
-
-There are three basic types of repositories that must be considered:
-
-* container repositories - repositories that are always top level
- repositories, and have a group of externals that must be managed.
-
-* simple repositories - repositories that are externals to another
- repository, and do not have any of their own externals that will be
- managed.
-
-* mixed use repositories - repositories that can act as a top level
- container repository or as an external to a top level
- container. They may also have their own sub-externals that are
-   required. They may have different externals needs depending on
- whether they are top level or not.
-
-Repositories must be able to checkout and switch to both branches and
-tags.
-
-Development
-===========
-
-The functionality to manage externals is broken into a library of core
-functionality and applications built with the library.
-
-The core library is called 'manic', a pseudo-homophone of (man)age
-(ex)ternals that is: short, pronounceable and spell-checkable. It is
-also no more or less meaningful to an unfamiliar user than a random
-jumble of letters forming an acronym.
-
-The core architecture of manic is:
-
-* externals description - an abstract description of an external,
-  including how to obtain it, where to obtain it, and where it goes in
-  the working tree.
-
-* externals - the software object representing an external.
-
-* source trees - a collection of externals
-
-* repository wrappers - object oriented wrappers around repository
-  operations, so the higher level management of the source tree and
-  externals does not have to be concerned with how a particular
- external is obtained and managed.
-
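
Principle 4 above (prefer plumbing commands over scraping human-oriented
output) is easy to illustrate. A sketch contrasting the two approaches; these
helper functions are illustrative only and not part of manic:

```python
import subprocess

def current_branch_plumbing(repo_dir):
    """Preferred: a plumbing query with stable, machine-readable output."""
    out = subprocess.check_output(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=repo_dir)
    return out.decode().strip()

def current_branch_scraped(repo_dir):
    """Brittle: parsing human-oriented 'git branch' output."""
    out = subprocess.check_output(["git", "branch"], cwd=repo_dir).decode()
    for line in out.splitlines():
        if line.startswith("* "):
            return line[2:].strip()
    return None
```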
diff --git a/manage_externals/test/doc/index.rst b/manage_externals/test/doc/index.rst
deleted file mode 100644
index 9ab287ad8c..0000000000
--- a/manage_externals/test/doc/index.rst
+++ /dev/null
@@ -1,22 +0,0 @@
-.. Manage Externals documentation master file, created by
- sphinx-quickstart on Wed Nov 29 10:53:25 2017.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-Welcome to Manage Externals's documentation!
-============================================
-
-.. toctree::
- :maxdepth: 2
- :caption: Contents:
-
-
- develop.rst
- testing.rst
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/manage_externals/test/doc/testing.rst b/manage_externals/test/doc/testing.rst
deleted file mode 100644
index 623f0e431c..0000000000
--- a/manage_externals/test/doc/testing.rst
+++ /dev/null
@@ -1,123 +0,0 @@
-Testing
-=======
-
-The manage_externals package has an automated test suite. All pull
-requests are expected to pass 100% of the automated tests, as well as
-be pep8 and lint 'clean' and maintain an approximately constant (at a
-minimum) level of code coverage.
-
-Quick Start
------------
-
-Do nothing approach
-~~~~~~~~~~~~~~~~~~~
-
-When you create a pull request on GitHub, Travis-CI continuous
-integration testing will run the test suite in both python2 and
-python3. Test results, lint results, and code coverage results are
-available online.
-
-Do something approach
-~~~~~~~~~~~~~~~~~~~~~
-
-In the test directory, run:
-
-.. code-block:: shell
-
- make env
- make lint
- make test
- make coverage
-
-
-Automated Testing
------------------
-
-The manage_externals manic library and executables are developed to be
-python2 and python3 compatible using only the standard library. The
-test suites meet the same requirements. But additional tools are
-required to provide lint and code coverage metrics and generate
-documentation. The requirements are maintained in the requirements.txt
-file, and can be automatically installed into an isolated environment
-via the Makefile.
-
-Bootstrap requirements:
-
-* python2 - version 2.7.x or later
-
-* python3 - version 3.6 tested; other versions may work
-
-* pip and virtualenv for python2 and python3
-
-Note: all make rules can be of the form ``make python=pythonX rule``
-or ``make rule``, depending on whether you want to use the default
-system python or a specific version.
-
-The Makefile in the test directory has the following rules:
-
-* ``make python=pythonX env`` - create a python virtual environment
- for python2 or python3 and install all required packages. These
- packages are required to run lint or coverage.
-
-* ``make style`` - runs autopep8
-
-* ``make lint`` - runs autopep8 and pylint
-
-* ``make test`` - run the full test suite
-
-* ``make utest`` - run just the unit tests
-
-* ``make stest`` - run just the system integration tests
-
-* ``make coverage`` - run the full test suite through the code
- coverage tool and generate an html report.
-
-* ``make readme`` - automatically generate the README files.
-
-* ``make clean`` - remove editor and pyc files
-
-* ``make clobber`` - remove all generated test files, including
- virtual environments, coverage reports, and temporary test
- repository directories.
-
-Unit Tests
-----------
-
-Unit tests are probably not 'true unit tests' for the pedantic, but
-are pragmatic unit tests. They cover small practical code blocks:
-functions, class methods, and groups of functions and class methods.
-
-System Integration Tests
-------------------------
-
-NOTE(bja, 2017-11) The system integration tests currently do not include svn repositories.
-
-The manage_externals package is extremely tedious and error-prone to test manually.
-
-Combinations that must be tested to ensure basic functionality are:
-
-* container repository pulling in simple externals
-
-* container repository pulling in mixed externals with sub-externals.
-
-* mixed repository acting as a container, pulling in simple externals and sub-externals
-
-Automatic system tests are handled the same way manual testing is done:
-
-* clone a test repository
-
-* create an externals description file for the test
-
-* run the executable with the desired args
-
-* check the results
-
-* potentially modify the repo (checkout a different branch)
-
-* rerun and test
-
-* etc
-
-The automated system stores small test repositories in the main repo
-by adding them as bare repositories. These repos are cloned via a
-subprocess call to git and manipulated during the tests.
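
That clone/run/check cycle maps naturally onto a unittest case. A condensed
sketch of the pattern (hypothetical test and helper names; the real tests live
in test_sys_checkout.py):

```python
import os
import subprocess
import tempfile
import unittest

class TestCheckoutPattern(unittest.TestCase):
    def test_simple_required(self):
        # 1. clone one of the small bare repositories stored in test/repos
        work = tempfile.mkdtemp()
        subprocess.check_call(["git", "clone", "repos/container.git", work])
        # 2. an externals description file for the test would be written here
        # 3. run the executable with the desired args (path is illustrative)
        subprocess.check_call(["../checkout_externals", "--status"], cwd=work)
        # 4. check the results
        self.assertTrue(os.path.isdir(os.path.join(work, ".git")))
```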
diff --git a/manage_externals/test/repos/README.md b/manage_externals/test/repos/README.md
deleted file mode 100644
index 026b684ea3..0000000000
--- a/manage_externals/test/repos/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-Git and svn repositories for testing git and svn-related behavior. For usage and terminology notes, see test/test_sys_checkout.py.
-
-For git repos: To list files and view file contents at HEAD:
-```
-cd <repo_dir>
-git ls-tree --full-tree -r --name-only HEAD
-git cat-file -p HEAD:<filename>
-```
-
-File contents at a glance:
-```
-container.git/
- readme.txt
-
-simple-ext.git/
- (has branches: feature2, feature3)
- (has tags: tag1, tag2)
- readme.txt
- simple_subdir/subdir_file.txt
-
-simple-ext-fork.git/
- (has tags: abandoned-feature, forked-feature-v1, tag1)
- (has branch: feature2)
- readme.txt
-
-mixed-cont-ext.git/
- (has branch: new-feature)
- readme.txt
- sub-externals.cfg ('simp_branch' section refers to 'feature2' branch in simple-ext.git/ repo)
-
-error/
- (no git repo here, just a readme.txt in the clear)
-```
diff --git a/manage_externals/test/repos/container.git/HEAD b/manage_externals/test/repos/container.git/HEAD
deleted file mode 100644
index cb089cd89a..0000000000
--- a/manage_externals/test/repos/container.git/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/heads/master
diff --git a/manage_externals/test/repos/container.git/config b/manage_externals/test/repos/container.git/config
deleted file mode 100644
index e6da231579..0000000000
--- a/manage_externals/test/repos/container.git/config
+++ /dev/null
@@ -1,6 +0,0 @@
-[core]
- repositoryformatversion = 0
- filemode = true
- bare = true
- ignorecase = true
- precomposeunicode = true
diff --git a/manage_externals/test/repos/container.git/description b/manage_externals/test/repos/container.git/description
deleted file mode 100644
index 498b267a8c..0000000000
--- a/manage_externals/test/repos/container.git/description
+++ /dev/null
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/container.git/info/exclude b/manage_externals/test/repos/container.git/info/exclude
deleted file mode 100644
index a5196d1be8..0000000000
--- a/manage_externals/test/repos/container.git/info/exclude
+++ /dev/null
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
diff --git a/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
deleted file mode 100644
index f65234e17f..0000000000
Binary files a/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 and /dev/null differ
diff --git a/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de b/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de
deleted file mode 100644
index 9759965b1b..0000000000
Binary files a/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de and /dev/null differ
diff --git a/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b b/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b
deleted file mode 100644
index d9976cc442..0000000000
Binary files a/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b and /dev/null differ
diff --git a/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 b/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03
deleted file mode 100644
index 460fd77819..0000000000
Binary files a/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 and /dev/null differ
diff --git a/manage_externals/test/repos/container.git/refs/heads/master b/manage_externals/test/repos/container.git/refs/heads/master
deleted file mode 100644
index 3ae00f3af0..0000000000
--- a/manage_externals/test/repos/container.git/refs/heads/master
+++ /dev/null
@@ -1 +0,0 @@
-715b8f3e4afe1802a178e1d603af404ba45d59de
diff --git a/manage_externals/test/repos/error/readme.txt b/manage_externals/test/repos/error/readme.txt
deleted file mode 100644
index 6b5753377e..0000000000
--- a/manage_externals/test/repos/error/readme.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Invalid or corrupted git repository (.git dir exists, but is empty) for error
-testing.
-
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/HEAD b/manage_externals/test/repos/mixed-cont-ext.git/HEAD
deleted file mode 100644
index cb089cd89a..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/heads/master
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/config b/manage_externals/test/repos/mixed-cont-ext.git/config
deleted file mode 100644
index e6da231579..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/config
+++ /dev/null
@@ -1,6 +0,0 @@
-[core]
- repositoryformatversion = 0
- filemode = true
- bare = true
- ignorecase = true
- precomposeunicode = true
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/description b/manage_externals/test/repos/mixed-cont-ext.git/description
deleted file mode 100644
index 498b267a8c..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/description
+++ /dev/null
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/info/exclude b/manage_externals/test/repos/mixed-cont-ext.git/info/exclude
deleted file mode 100644
index a5196d1be8..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/info/exclude
+++ /dev/null
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/00/437ac2000d5f06fb8a572a01a5bbdae98b17cb b/manage_externals/test/repos/mixed-cont-ext.git/objects/00/437ac2000d5f06fb8a572a01a5bbdae98b17cb
deleted file mode 100644
index 145a6990a8..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/00/437ac2000d5f06fb8a572a01a5bbdae98b17cb and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 b/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379
deleted file mode 100644
index 032f4b1ca6..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 b/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7
deleted file mode 100644
index 13d15a96a5..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/14/368b701616a8c53820b610414a4b9a07540cf6 b/manage_externals/test/repos/mixed-cont-ext.git/objects/14/368b701616a8c53820b610414a4b9a07540cf6
deleted file mode 100644
index 53c4e79ed0..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/objects/14/368b701616a8c53820b610414a4b9a07540cf6
+++ /dev/null
@@ -1 +0,0 @@
-x50S0A1FMWiRh-iitjz
h#F+|m"rFd
<;s̱۬OEQE}TLU<,9}]IiP. 9ze vA$8#DK
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/15/2b57e1cf23721cd17ff681cb9276e3fb9fc091 b/manage_externals/test/repos/mixed-cont-ext.git/objects/15/2b57e1cf23721cd17ff681cb9276e3fb9fc091
deleted file mode 100644
index d09c006f07..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/objects/15/2b57e1cf23721cd17ff681cb9276e3fb9fc091
+++ /dev/null
@@ -1,2 +0,0 @@
-xKn0)xEӛP"eCuzb0Su)!h9.!<ے,s$P0/f.M_ɅKjc٧$03Ytz:|HK.p缏BUxzL`N2M2J]K۾>#
-MPtM0v&>Kci8V;
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f b/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f
deleted file mode 100644
index 7bacde68db..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 b/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69
deleted file mode 100644
index 8c6b04837a..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c b/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c
deleted file mode 100644
index 1a35b74d47..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
deleted file mode 100644
index f65234e17f..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 b/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4
deleted file mode 100644
index 6b2146cae4..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a b/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a
deleted file mode 100644
index 852a051139..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a
+++ /dev/null
@@ -1 +0,0 @@
-xAN09sʎ;~2J^M,'8ԝھ_yyR3؍lmvƕPBFC>y*bla-n^]D,xfv2p }GzxNvq~Zc y+QTt;]C:AgA( XAG*=i\_^'
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 b/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332
deleted file mode 100644
index 682d799898..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 b/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75
deleted file mode 100644
index 33c9f6cdf1..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75
+++ /dev/null
@@ -1 +0,0 @@
-xKN0YcȟLlK7鴟5#{OzғmW%ӓv8&eFٱ$/UɞzRJ%ZY|YSC/'*}A7Cۑϋ1^L0f7c b/Jo5-Ů;҅AH:XADZ:ڇ8M^
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd b/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd
deleted file mode 100644
index 73e7cbfbc8..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 b/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38
deleted file mode 100644
index 189ed85bb3..0000000000
Binary files a/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 and /dev/null differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 b/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901
deleted file mode 100644
index 619e38ee78..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901
+++ /dev/null
@@ -1,2 +0,0 @@
-x=;0:v=rJf`)noW)zgA>.pA
-! w4ݵQ=äZ90k G)*
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master
deleted file mode 100644
index 1e0eef1ea3..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master
+++ /dev/null
@@ -1 +0,0 @@
-6fc379457ecb4e576a13c7610ae1fa73f845ee6a
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature
deleted file mode 100644
index 607e80d1bc..0000000000
--- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature
+++ /dev/null
@@ -1 +0,0 @@
-9580ecc12f16334ce44e42287d5d46f927bb7b75
diff --git a/manage_externals/test/repos/simple-ext-fork.git/HEAD b/manage_externals/test/repos/simple-ext-fork.git/HEAD
deleted file mode 100644
index cb089cd89a..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/heads/master
diff --git a/manage_externals/test/repos/simple-ext-fork.git/config b/manage_externals/test/repos/simple-ext-fork.git/config
deleted file mode 100644
index 04eba17870..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/config
+++ /dev/null
@@ -1,8 +0,0 @@
-[core]
- repositoryformatversion = 0
- filemode = true
- bare = true
- ignorecase = true
- precomposeunicode = true
-[remote "origin"]
- url = /Users/andreb/projects/ncar/git-conversion/checkout-model-dev/cesm-demo-externals/manage_externals/test/repos/simple-ext.git
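The config above describes a bare fork whose origin URL is an absolute path from the original author's machine (left as-is above). If such a fixture ever needed to be regenerated, a bare local clone would produce an equivalent layout; both paths in this sketch are placeholders:

    import subprocess

    upstream = "/tmp/repos/simple-ext.git"       # placeholder path
    fork = "/tmp/repos/simple-ext-fork.git"      # placeholder path
    subprocess.run(["git", "clone", "--bare", upstream, fork], check=True)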
diff --git a/manage_externals/test/repos/simple-ext-fork.git/description b/manage_externals/test/repos/simple-ext-fork.git/description
deleted file mode 100644
index 498b267a8c..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/description
+++ /dev/null
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/simple-ext-fork.git/info/exclude b/manage_externals/test/repos/simple-ext-fork.git/info/exclude
deleted file mode 100644
index a5196d1be8..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/info/exclude
+++ /dev/null
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
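The exclude template documents shell-glob ignore patterns such as '*.[oa]' and '*~'. Python's fnmatch implements the same glob core (though not the full gitignore semantics, e.g. negation and anchoring), which is enough to illustrate the two commented examples:

    from fnmatch import fnmatch

    for name in ["main.o", "libfoo.a", "notes.txt~", "notes.txt"]:
        ignored = fnmatch(name, "*.[oa]") or fnmatch(name, "*~")
        print(name, "ignored" if ignored else "kept")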
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f
deleted file mode 100644
index ae28c037e5..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8
deleted file mode 100644
index 32d6896e3c..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677
deleted file mode 100644
index db51ce1953..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
deleted file mode 100644
index 564e7bba63..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f b/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f
deleted file mode 100644
index 0d738af68b..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 b/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4
deleted file mode 100644
index b6284f8413..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf b/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf
deleted file mode 100644
index 0999f0d4b9..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48
deleted file mode 100644
index 9da8434f65..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26
deleted file mode 100644
index 22065ba543..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b
deleted file mode 100644
index 9a31c7ef2e..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 b/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95
deleted file mode 100644
index d8ba654548..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 b/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936
deleted file mode 100644
index 9b40a0afa0..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b
deleted file mode 100644
index 3019d2bac0..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
deleted file mode 100644
index 1d27accb58..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca b/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca
deleted file mode 100644
index 3e945cdeb1..0000000000
Binary files a/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/packed-refs b/manage_externals/test/repos/simple-ext-fork.git/packed-refs
deleted file mode 100644
index b8f9e86308..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/packed-refs
+++ /dev/null
@@ -1,5 +0,0 @@
-# pack-refs with: peeled fully-peeled sorted
-36418b4e5665956a90725c9a1b5a8e551c5f3d48 refs/heads/feature2
-9b75494003deca69527bb64bcaa352e801611dd2 refs/heads/master
-11a76e3d9a67313dec7ce1230852ab5c86352c5c refs/tags/tag1
-^9b75494003deca69527bb64bcaa352e801611dd2
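packed-refs stores one 'objectid refname' pair per line after the '# pack-refs with: ...' header; a '^objectid' line gives the commit that the preceding annotated tag peels to (here, tag1). A small parser sketch under those assumptions (the function name is illustrative):

    def parse_packed_refs(path):
        refs, last = {}, None
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue  # header and blank lines
                if line.startswith("^"):
                    refs[last + "^{}"] = line[1:]  # peeled target of previous tag
                    continue
                sha, name = line.split(" ", 1)
                refs[name] = sha
                last = name
        return refs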
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2
deleted file mode 100644
index d223b0362d..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2
+++ /dev/null
@@ -1 +0,0 @@
-f268d4e56d067da9bd1d85e55bdc40a8bd2b0bca
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature
deleted file mode 100644
index 8a18bf08e9..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature
+++ /dev/null
@@ -1 +0,0 @@
-a42fe9144f5707bc1e9515ce1b44681f7aba6f95
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1
deleted file mode 100644
index 2764b552d5..0000000000
--- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1
+++ /dev/null
@@ -1 +0,0 @@
-8d2b3b35126224c975d23f109aa1e3cbac452989
diff --git a/manage_externals/test/repos/simple-ext.git/HEAD b/manage_externals/test/repos/simple-ext.git/HEAD
deleted file mode 100644
index cb089cd89a..0000000000
--- a/manage_externals/test/repos/simple-ext.git/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/heads/master
diff --git a/manage_externals/test/repos/simple-ext.git/config b/manage_externals/test/repos/simple-ext.git/config
deleted file mode 100644
index e6da231579..0000000000
--- a/manage_externals/test/repos/simple-ext.git/config
+++ /dev/null
@@ -1,6 +0,0 @@
-[core]
- repositoryformatversion = 0
- filemode = true
- bare = true
- ignorecase = true
- precomposeunicode = true
diff --git a/manage_externals/test/repos/simple-ext.git/description b/manage_externals/test/repos/simple-ext.git/description
deleted file mode 100644
index 498b267a8c..0000000000
--- a/manage_externals/test/repos/simple-ext.git/description
+++ /dev/null
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/simple-ext.git/info/exclude b/manage_externals/test/repos/simple-ext.git/info/exclude
deleted file mode 100644
index a5196d1be8..0000000000
--- a/manage_externals/test/repos/simple-ext.git/info/exclude
+++ /dev/null
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
diff --git a/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f
deleted file mode 100644
index ae28c037e5..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/09/0e1034746b2c865f7b0280813dbf4061a700e8 b/manage_externals/test/repos/simple-ext.git/objects/09/0e1034746b2c865f7b0280813dbf4061a700e8
deleted file mode 100644
index e5255047bf..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/09/0e1034746b2c865f7b0280813dbf4061a700e8 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8
deleted file mode 100644
index 32d6896e3c..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
deleted file mode 100644
index 564e7bba63..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d b/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d
deleted file mode 100644
index acaf7889b4..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 b/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11
deleted file mode 100644
index 0f0db6797f..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48
deleted file mode 100644
index 9da8434f65..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
deleted file mode 100644
index f65234e17f..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/60/7ec299c17dd285c029edc41a0109e49d441380 b/manage_externals/test/repos/simple-ext.git/objects/60/7ec299c17dd285c029edc41a0109e49d441380
deleted file mode 100644
index 3f6959cc54..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/60/7ec299c17dd285c029edc41a0109e49d441380 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 b/manage_externals/test/repos/simple-ext.git/objects/60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5
deleted file mode 100644
index 68a86c24ea..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/63/a99393d1baff97ccef967af30380659867b139 b/manage_externals/test/repos/simple-ext.git/objects/63/a99393d1baff97ccef967af30380659867b139
deleted file mode 100644
index efe17af8fd..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/63/a99393d1baff97ccef967af30380659867b139 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/95/3256da5612fcd9263590a353bc18c6f224e74f b/manage_externals/test/repos/simple-ext.git/objects/95/3256da5612fcd9263590a353bc18c6f224e74f
deleted file mode 100644
index 6187628628..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/95/3256da5612fcd9263590a353bc18c6f224e74f and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 b/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2
deleted file mode 100644
index ba1b51f515..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 b/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04
deleted file mode 100644
index fb5feb96c2..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 b/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7
deleted file mode 100644
index 1b3b272442..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 b/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
deleted file mode 100644
index 1d27accb58..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/d1/163870d19c3dee34fada3a76b785cfa2a8424b b/manage_externals/test/repos/simple-ext.git/objects/d1/163870d19c3dee34fada3a76b785cfa2a8424b
deleted file mode 100644
index 04e760363a..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/d1/163870d19c3dee34fada3a76b785cfa2a8424b and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 b/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4
deleted file mode 100644
index f08ae820c9..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff b/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff
deleted file mode 100644
index 4018ea5914..0000000000
Binary files a/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext.git/refs/heads/feature2
deleted file mode 100644
index 01a0dd6e23..0000000000
--- a/manage_externals/test/repos/simple-ext.git/refs/heads/feature2
+++ /dev/null
@@ -1 +0,0 @@
-36418b4e5665956a90725c9a1b5a8e551c5f3d48
diff --git a/manage_externals/test/repos/simple-ext.git/refs/heads/feature3 b/manage_externals/test/repos/simple-ext.git/refs/heads/feature3
deleted file mode 100644
index dd24079fce..0000000000
--- a/manage_externals/test/repos/simple-ext.git/refs/heads/feature3
+++ /dev/null
@@ -1 +0,0 @@
-090e1034746b2c865f7b0280813dbf4061a700e8
diff --git a/manage_externals/test/repos/simple-ext.git/refs/heads/master b/manage_externals/test/repos/simple-ext.git/refs/heads/master
deleted file mode 100644
index adf1ccb002..0000000000
--- a/manage_externals/test/repos/simple-ext.git/refs/heads/master
+++ /dev/null
@@ -1 +0,0 @@
-607ec299c17dd285c029edc41a0109e49d441380
diff --git a/manage_externals/test/repos/simple-ext.git/refs/tags/tag1 b/manage_externals/test/repos/simple-ext.git/refs/tags/tag1
deleted file mode 100644
index ee595be8bd..0000000000
--- a/manage_externals/test/repos/simple-ext.git/refs/tags/tag1
+++ /dev/null
@@ -1 +0,0 @@
-11a76e3d9a67313dec7ce1230852ab5c86352c5c
diff --git a/manage_externals/test/repos/simple-ext.git/refs/tags/tag2 b/manage_externals/test/repos/simple-ext.git/refs/tags/tag2
deleted file mode 100644
index 4160b6c494..0000000000
--- a/manage_externals/test/repos/simple-ext.git/refs/tags/tag2
+++ /dev/null
@@ -1 +0,0 @@
-b7692b6d391899680da7b9b6fd8af4c413f06fe7
diff --git a/manage_externals/test/repos/simple-ext.svn/README.txt b/manage_externals/test/repos/simple-ext.svn/README.txt
deleted file mode 100644
index 9935818a1b..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-This is a Subversion repository; use the 'svnadmin' and 'svnlook'
-tools to examine it. Do not add, delete, or modify files here
-unless you know how to avoid corrupting the repository.
-
-Visit http://subversion.apache.org/ for more information.
diff --git a/manage_externals/test/repos/simple-ext.svn/conf/authz b/manage_externals/test/repos/simple-ext.svn/conf/authz
deleted file mode 100644
index 0b9a41074e..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/conf/authz
+++ /dev/null
@@ -1,32 +0,0 @@
-### This file is an example authorization file for svnserve.
-### Its format is identical to that of mod_authz_svn authorization
-### files.
-### As shown below each section defines authorizations for the path and
-### (optional) repository specified by the section name.
-### The authorizations follow. An authorization line can refer to:
-### - a single user,
-### - a group of users defined in a special [groups] section,
-### - an alias defined in a special [aliases] section,
-### - all authenticated users, using the '$authenticated' token,
-### - only anonymous users, using the '$anonymous' token,
-### - anyone, using the '*' wildcard.
-###
-### A match can be inverted by prefixing the rule with '~'. Rules can
-### grant read ('r') access, read-write ('rw') access, or no access
-### ('').
-
-[aliases]
-# joe = /C=XZ/ST=Dessert/L=Snake City/O=Snake Oil, Ltd./OU=Research Institute/CN=Joe Average
-
-[groups]
-# harry_and_sally = harry,sally
-# harry_sally_and_joe = harry,sally,&joe
-
-# [/foo/bar]
-# harry = rw
-# &joe = r
-# * =
-
-# [repository:/baz/fuz]
-# @harry_and_sally = rw
-# * = r
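The authz template describes INI-style sections keyed by path (optionally 'repository:/path'), with one rule per user and a '*' wildcard fallback. A rough lookup helper under those assumptions, ignoring groups, aliases, and '~' inversion (the function name is illustrative; note that configparser lowercases option names by default):

    import configparser

    def access_for(authz_path, section, user):
        parser = configparser.ConfigParser()
        parser.read(authz_path)
        if not parser.has_section(section):
            return ""  # no matching rule: no access
        rules = parser[section]
        return rules.get(user, rules.get("*", ""))  # "", "r", or "rw"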
diff --git a/manage_externals/test/repos/simple-ext.svn/conf/hooks-env.tmpl b/manage_externals/test/repos/simple-ext.svn/conf/hooks-env.tmpl
deleted file mode 100644
index ee965c316c..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/conf/hooks-env.tmpl
+++ /dev/null
@@ -1,19 +0,0 @@
-### This file is an example hook script environment configuration file.
-### Hook scripts run in an empty environment by default.
-### As shown below each section defines environment variables for a
-### particular hook script. The [default] section defines environment
-### variables for all hook scripts, unless overridden by a hook-specific
-### section.
-
-### This example configures a UTF-8 locale for all hook scripts, so that
-### special characters, such as umlauts, may be printed to stderr.
-### If UTF-8 is used with a mod_dav_svn server, the SVNUseUTF8 option must
-### also be set to 'yes' in httpd.conf.
-### With svnserve, the LANG environment variable of the svnserve process
-### must be set to the same value as given here.
-[default]
-LANG = en_US.UTF-8
-
-### This sets the PATH environment variable for the pre-commit hook.
-[pre-commit]
-PATH = /usr/local/bin:/usr/bin:/usr/sbin
diff --git a/manage_externals/test/repos/simple-ext.svn/conf/passwd b/manage_externals/test/repos/simple-ext.svn/conf/passwd
deleted file mode 100644
index ecaa08dcec..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/conf/passwd
+++ /dev/null
@@ -1,8 +0,0 @@
-### This file is an example password file for svnserve.
-### Its format is similar to that of svnserve.conf. As shown in the
-### example below it contains one section labelled [users].
-### The name and password for each user follow, one account per line.
-
-[users]
-# harry = harryssecret
-# sally = sallyssecret
diff --git a/manage_externals/test/repos/simple-ext.svn/conf/svnserve.conf b/manage_externals/test/repos/simple-ext.svn/conf/svnserve.conf
deleted file mode 100644
index 6cefc17b3e..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/conf/svnserve.conf
+++ /dev/null
@@ -1,81 +0,0 @@
-### This file controls the configuration of the svnserve daemon, if you
-### use it to allow access to this repository. (If you only allow
-### access through http: and/or file: URLs, then this file is
-### irrelevant.)
-
-### Visit http://subversion.apache.org/ for more information.
-
-[general]
-### The anon-access and auth-access options control access to the
-### repository for unauthenticated (a.k.a. anonymous) users and
-### authenticated users, respectively.
-### Valid values are "write", "read", and "none".
-### Setting the value to "none" prohibits both reading and writing;
-### "read" allows read-only access, and "write" allows complete
-### read/write access to the repository.
-### The sample settings below are the defaults and specify that anonymous
-### users have read-only access to the repository, while authenticated
-### users have read and write access to the repository.
-# anon-access = read
-# auth-access = write
-### The password-db option controls the location of the password
-### database file. Unless you specify a path starting with a /,
-### the file's location is relative to the directory containing
-### this configuration file.
-### If SASL is enabled (see below), this file will NOT be used.
-### Uncomment the line below to use the default password file.
-# password-db = passwd
-### The authz-db option controls the location of the authorization
-### rules for path-based access control. Unless you specify a path
-### starting with a /, the file's location is relative to the
-### directory containing this file. The specified path may be a
-### repository relative URL (^/) or an absolute file:// URL to a text
-### file in a Subversion repository. If you don't specify an authz-db,
-### no path-based access control is done.
-### Uncomment the line below to use the default authorization file.
-# authz-db = authz
-### The groups-db option controls the location of the file with the
-### group definitions and allows maintaining groups separately from the
-### authorization rules. The groups-db file is of the same format as the
-### authz-db file and should contain a single [groups] section with the
-### group definitions. If the option is enabled, the authz-db file cannot
-### contain a [groups] section. Unless you specify a path starting with
-### a /, the file's location is relative to the directory containing this
-### file. The specified path may be a repository relative URL (^/) or an
-### absolute file:// URL to a text file in a Subversion repository.
-### This option is not being used by default.
-# groups-db = groups
-### This option specifies the authentication realm of the repository.
-### If two repositories have the same authentication realm, they should
-### have the same password database, and vice versa. The default realm
-### is repository's uuid.
-# realm = My First Repository
-### The force-username-case option causes svnserve to case-normalize
-### usernames before comparing them against the authorization rules in the
-### authz-db file configured above. Valid values are "upper" (to upper-
-### case the usernames), "lower" (to lowercase the usernames), and
-### "none" (to compare usernames as-is without case conversion, which
-### is the default behavior).
-# force-username-case = none
-### The hooks-env option specifies a path to the hook script environment
-### configuration file. This option overrides the per-repository default
-### and can be used to configure the hook script environment for multiple
-### repositories in a single file, if an absolute path is specified.
-### Unless you specify an absolute path, the file's location is relative
-### to the directory containing this file.
-# hooks-env = hooks-env
-
-[sasl]
-### This option specifies whether you want to use the Cyrus SASL
-### library for authentication. Default is false.
-### Enabling this option requires svnserve to have been built with Cyrus
-### SASL support; to check, run 'svnserve --version' and look for a line
-### reading 'Cyrus SASL authentication is available.'
-# use-sasl = true
-### These options specify the desired strength of the security layer
-### that you want SASL to provide. 0 means no encryption, 1 means
-### integrity-checking only, values larger than 1 are correlated
-### to the effective key length for encryption (e.g. 128 means 128-bit
-### encryption). The values below are the defaults.
-# min-encryption = 0
-# max-encryption = 256
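Every setting in the svnserve.conf above is commented out, so the documented defaults apply: anon-access = read, auth-access = write. A sketch of reading the effective values with those defaults (the path is relative to the repository, per the comments):

    import configparser

    parser = configparser.ConfigParser()
    parser.read("conf/svnserve.conf")  # a missing file just yields the defaults
    general = parser["general"] if parser.has_section("general") else {}
    anon = general.get("anon-access", "read")   # documented default
    auth = general.get("auth-access", "write")  # documented default
    print("anonymous:", anon, "| authenticated:", auth)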
diff --git a/manage_externals/test/repos/simple-ext.svn/db/current b/manage_externals/test/repos/simple-ext.svn/db/current
deleted file mode 100644
index 00750edc07..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/current
+++ /dev/null
@@ -1 +0,0 @@
-3
diff --git a/manage_externals/test/repos/simple-ext.svn/db/format b/manage_externals/test/repos/simple-ext.svn/db/format
deleted file mode 100644
index 5dd0c22198..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/format
+++ /dev/null
@@ -1,3 +0,0 @@
-8
-layout sharded 1000
-addressing logical
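The db/format file holds a bare FSFS format number followed by capability lines ('layout sharded 1000', 'addressing logical'). A tiny reader sketch, assuming only the layout seen in this fixture:

    def read_fsfs_format(path):
        with open(path) as handle:
            lines = handle.read().splitlines()
        number = int(lines[0])  # 8 in this fixture
        options = dict(line.split(" ", 1) for line in lines[1:] if line)
        return number, options  # e.g. (8, {'layout': 'sharded 1000', ...})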
diff --git a/manage_externals/test/repos/simple-ext.svn/db/fs-type b/manage_externals/test/repos/simple-ext.svn/db/fs-type
deleted file mode 100644
index 4fdd95313f..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/fs-type
+++ /dev/null
@@ -1 +0,0 @@
-fsfs
diff --git a/manage_externals/test/repos/simple-ext.svn/db/fsfs.conf b/manage_externals/test/repos/simple-ext.svn/db/fsfs.conf
deleted file mode 100644
index ac6877a727..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/fsfs.conf
+++ /dev/null
@@ -1,200 +0,0 @@
-### This file controls the configuration of the FSFS filesystem.
-
-[memcached-servers]
-### These options name memcached servers used to cache internal FSFS
-### data. See http://www.danga.com/memcached/ for more information on
-### memcached. To use memcached with FSFS, run one or more memcached
-### servers, and specify each of them as an option like so:
-# first-server = 127.0.0.1:11211
-# remote-memcached = mymemcached.corp.example.com:11212
-### The option name is ignored; the value is of the form HOST:PORT.
-### memcached servers can be shared between multiple repositories;
-### however, if you do this, you *must* ensure that repositories have
-### distinct UUIDs and paths, or else cached data from one repository
-### might be used by another accidentally. Note also that memcached has
-### no authentication for reads or writes, so you must ensure that your
-### memcached servers are only accessible by trusted users.
-
-[caches]
-### When a cache-related error occurs, normally Subversion ignores it
-### and continues, logging an error if the server is appropriately
-### configured (and ignoring it with file:// access). To make
-### Subversion never ignore cache errors, uncomment this line.
-# fail-stop = true
-
-[rep-sharing]
-### To conserve space, the filesystem can optionally avoid storing
-### duplicate representations. This comes at a slight cost in
-### performance, as maintaining a database of shared representations can
-### increase commit times. The space savings are dependent upon the size
-### of the repository, the number of objects it contains and the amount of
-### duplication between them, usually a function of the branching and
-### merging process.
-###
-### The following parameter enables rep-sharing in the repository. It can
-### be switched on and off at will, but for best space-saving results
-### should be enabled consistently over the life of the repository.
-### 'svnadmin verify' will check the rep-cache regardless of this setting.
-### rep-sharing is enabled by default.
-# enable-rep-sharing = true
-
-[deltification]
-### To conserve space, the filesystem stores data as differences against
-### existing representations. This comes at a slight cost in performance,
-### as calculating differences can increase commit times. Reading data
-### will also create higher CPU load and the data will be fragmented.
-### Since deltification tends to save significant amounts of disk space,
-### the overall I/O load can actually be lower.
-###
-### The options in this section allow for tuning the deltification
-### strategy. Their effects on data size and server performance may vary
-### from one repository to another. Versions prior to 1.8 will ignore
-### this section.
-###
-### The following parameter enables deltification for directories. It can
-### be switched on and off at will, but for best space-saving results
-### should be enabled consistently over the lifetime of the repository.
-### Repositories containing large directories will benefit greatly.
-### In rarely accessed repositories, the I/O overhead may be significant
-### as caches will most likely be low.
-### directory deltification is enabled by default.
-# enable-dir-deltification = true
-###
-### The following parameter enables deltification for properties on files
-### and directories. Overall, this is a minor tuning option but can save
-### some disk space if you merge frequently or frequently change node
-### properties. You should not activate this if rep-sharing has been
-### disabled because this may result in a net increase in repository size.
-### property deltification is enabled by default.
-# enable-props-deltification = true
-###
-### During commit, the server may need to walk the whole change history of
-### a given node to find a suitable deltification base. This linear
-### process can impact commit times, svnadmin load and similar operations.
-### This setting limits the depth of the deltification history. If the
-### threshold has been reached, the node will be stored as fulltext and a
-### new deltification history begins.
-### Note, this is unrelated to svn log.
-### Very large values rarely provide significant additional savings but
-### can impact performance greatly - in particular if directory
-### deltification has been activated. Very small values may be useful in
-### repositories that are dominated by large, changing binaries.
-### Should be a power of two minus 1. A value of 0 will effectively
-### disable deltification.
-### For 1.8, the default value is 1023; earlier versions have no limit.
-# max-deltification-walk = 1023
-###
-### The skip-delta scheme used by FSFS tends to repeatedly store redundant
-### delta information where a simple delta against the latest version is
-### often smaller. By default, 1.8+ will therefore use skip deltas only
-### after the linear chain of deltas has grown beyond the threshold
-### specified by this setting.
-### Values up to 64 can result in some reduction in repository size for
-### the cost of quickly increasing I/O and CPU costs. Similarly, smaller
-### numbers can reduce those costs at the cost of more disk space. For
-### rarely read repositories or those containing larger binaries, this may
-### present a better trade-off.
-### Should be a power of two. A value of 1 or smaller will cause the
-### exclusive use of skip-deltas (as in pre-1.8).
-### For 1.8, the default value is 16; earlier versions use 1.
-# max-linear-deltification = 16
-###
-### After deltification, we compress the data to minimize on-disk size.
-### This setting controls the compression algorithm, which will be used in
-### future revisions. It can be used to either disable compression or to
-### select between available algorithms (zlib, lz4). zlib is a general-
-### purpose compression algorithm. lz4 is a fast compression algorithm
-### which should be preferred for repositories with large and, possibly,
-### incompressible files. Note that the compression ratio of lz4 is
-### usually lower than the one provided by zlib, but using it can
-### significantly speed up commits as well as reading the data.
-### lz4 compression algorithm is supported, starting from format 8
-### repositories, available in Subversion 1.10 and higher.
-### The syntax of this option is:
-### compression = none | lz4 | zlib | zlib-1 ... zlib-9
-### Versions prior to Subversion 1.10 will ignore this option.
-### The default value is 'lz4' if supported by the repository format and
-### 'zlib' otherwise. 'zlib' is currently equivalent to 'zlib-5'.
-# compression = lz4
-###
-### DEPRECATED: The new 'compression' option deprecates previously used
-### 'compression-level' option, which was used to configure zlib compression.
-### For compatibility with previous versions of Subversion, this option can
-### still be used (and it will result in zlib compression with the
-### corresponding compression level).
-### compression-level = 0 ... 9 (default is 5)
-
-[packed-revprops]
-### This parameter controls the size (in kBytes) of packed revprop files.
-### Revprops of consecutive revisions will be concatenated into a single
-### file up to but not exceeding the threshold given here. However, each
-### pack file may be much smaller and revprops of a single revision may be
-### much larger than the limit set here. The threshold will be applied
-### before optional compression takes place.
-### Large values will reduce disk space usage at the expense of increased
-### latency and CPU usage reading and changing individual revprops.
-### Values smaller than 4 kByte will not improve latency any further and
-### quickly render revprop packing ineffective.
-### revprop-pack-size is 16 kBytes by default for non-compressed revprop
-### pack files and 64 kBytes when compression has been enabled.
-# revprop-pack-size = 16
-###
-### To save disk space, packed revprop files may be compressed. Standard
-### revprops tend to allow for very effective compression. Reading and
-### even more so writing, become significantly more CPU intensive.
-### Compressing packed revprops is disabled by default.
-# compress-packed-revprops = false
-
-[io]
-### Parameters in this section control the data access granularity in
-### format 7 repositories and later. The defaults should translate into
-### decent performance over a wide range of setups.
-###
-### When a specific piece of information needs to be read from disk, a
-### data block is being read at once and its contents are being cached.
-### If the repository is being stored on a RAID, the block size should be
-### either 50% or 100% of RAID block size / granularity. Also, your file
-### system blocks/clusters should be properly aligned and sized. In that
-### setup, each access will hit only one disk (minimizes I/O load) but
-### uses all the data provided by the disk in a single access.
-### For SSD-based storage systems, slightly lower values around 16 kB
-### may improve latency while still maximizing throughput. If block-read
-### has not been enabled, this will be capped to 4 kBytes.
-### Can be changed at any time but must be a power of 2.
-### block-size is given in kBytes and with a default of 64 kBytes.
-# block-size = 64
-###
-### The log-to-phys index maps data item numbers to offsets within the
-### rev or pack file. This index is organized in pages of a fixed maximum
-### capacity. To access an item, the page table and the respective page
-### must be read.
-### This parameter only affects revisions with thousands of changed paths.
-### If you have several extremely large revisions (~1 mio changes), think
-### about increasing this setting. Reducing the value will rarely result
-### in a net speedup.
-### This is an expert setting. Must be a power of 2.
-### l2p-page-size is 8192 entries by default.
-# l2p-page-size = 8192
-###
-### The phys-to-log index maps positions within the rev or pack file to
-### data items, i.e. describes what piece of information is being
-### stored at any particular offset. The index describes the rev file
-### in chunks (pages) and keeps a global list of all those pages. Large
-### pages mean a shorter page table but a larger per-page description of
-### data items in it. The latency sweetspot depends on the change size
-### distribution but covers a relatively wide range.
-### If the repository contains very large files, i.e. individual changes
-### of tens of MB each, increasing the page size will shorten the index
-### file at the expense of a slightly increased latency in sections with
-### smaller changes.
-### For source code repositories, this should be about 16x the block-size.
-### Must be a power of 2.
-### p2l-page-size is given in kBytes and with a default of 1024 kBytes.
-# p2l-page-size = 1024
-
-[debug]
-###
-### Whether to verify each new revision immediately before finalizing
-### the commit. This is disabled by default except in maintainer-mode
-### builds.
-# verify-before-commit = false
diff --git a/manage_externals/test/repos/simple-ext.svn/db/min-unpacked-rev b/manage_externals/test/repos/simple-ext.svn/db/min-unpacked-rev
deleted file mode 100644
index 573541ac97..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/min-unpacked-rev
+++ /dev/null
@@ -1 +0,0 @@
-0
diff --git a/manage_externals/test/repos/simple-ext.svn/db/rep-cache.db b/manage_externals/test/repos/simple-ext.svn/db/rep-cache.db
deleted file mode 100644
index 3193b2eaad..0000000000
Binary files a/manage_externals/test/repos/simple-ext.svn/db/rep-cache.db and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/0 b/manage_externals/test/repos/simple-ext.svn/db/revprops/0/0
deleted file mode 100644
index 92768005d3..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/0
+++ /dev/null
@@ -1,5 +0,0 @@
-K 8
-svn:date
-V 27
-2023-11-16T20:11:46.318861Z
-END
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/1 b/manage_externals/test/repos/simple-ext.svn/db/revprops/0/1
deleted file mode 100644
index aa95a9de9f..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/1
+++ /dev/null
@@ -1,13 +0,0 @@
-K 10
-svn:author
-V 5
-sacks
-K 8
-svn:date
-V 27
-2023-11-16T20:15:56.917904Z
-K 7
-svn:log
-V 30
-Setting up directory structure
-END
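The revprops files use Subversion's hash-dump framing: 'K <bytes>' then the key line, 'V <bytes>' then the value line, closed by 'END'. A simplified parser that assumes one-line values, which holds for every entry in these fixtures (real svn:log values may span multiple lines):

    def parse_hash_dump(path):
        with open(path) as handle:
            lines = handle.read().splitlines()
        props, i = {}, 0
        while i < len(lines) and lines[i] != "END":
            assert lines[i].startswith("K ")    # key header with byte length
            assert lines[i + 2].startswith("V ")
            props[lines[i + 1]] = lines[i + 3]  # one-line value assumed
            i += 4
        return props

    # e.g. parse_hash_dump("db/revprops/0/1") -> {'svn:author': 'sacks', ...}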
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/2 b/manage_externals/test/repos/simple-ext.svn/db/revprops/0/2
deleted file mode 100644
index 3d04d8909a..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/2
+++ /dev/null
@@ -1,13 +0,0 @@
-K 10
-svn:author
-V 5
-sacks
-K 8
-svn:date
-V 27
-2023-11-16T20:27:31.407916Z
-K 7
-svn:log
-V 10
-Add README
-END
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/3 b/manage_externals/test/repos/simple-ext.svn/db/revprops/0/3
deleted file mode 100644
index de20268415..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/revprops/0/3
+++ /dev/null
@@ -1,13 +0,0 @@
-K 10
-svn:author
-V 5
-sacks
-K 8
-svn:date
-V 27
-2023-11-16T21:14:43.366717Z
-K 7
-svn:log
-V 27
-Creating cesm2.0.beta07 tag
-END
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revs/0/0 b/manage_externals/test/repos/simple-ext.svn/db/revs/0/0
deleted file mode 100644
index 9a56c280c4..0000000000
Binary files a/manage_externals/test/repos/simple-ext.svn/db/revs/0/0 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revs/0/1 b/manage_externals/test/repos/simple-ext.svn/db/revs/0/1
deleted file mode 100644
index c6982eeab7..0000000000
Binary files a/manage_externals/test/repos/simple-ext.svn/db/revs/0/1 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revs/0/2 b/manage_externals/test/repos/simple-ext.svn/db/revs/0/2
deleted file mode 100644
index 99a14cf4b7..0000000000
Binary files a/manage_externals/test/repos/simple-ext.svn/db/revs/0/2 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.svn/db/revs/0/3 b/manage_externals/test/repos/simple-ext.svn/db/revs/0/3
deleted file mode 100644
index f437a6d530..0000000000
Binary files a/manage_externals/test/repos/simple-ext.svn/db/revs/0/3 and /dev/null differ
diff --git a/manage_externals/test/repos/simple-ext.svn/db/txn-current b/manage_externals/test/repos/simple-ext.svn/db/txn-current
deleted file mode 100644
index 00750edc07..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/txn-current
+++ /dev/null
@@ -1 +0,0 @@
-3
diff --git a/manage_externals/test/repos/simple-ext.svn/db/txn-current-lock b/manage_externals/test/repos/simple-ext.svn/db/txn-current-lock
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/manage_externals/test/repos/simple-ext.svn/db/uuid b/manage_externals/test/repos/simple-ext.svn/db/uuid
deleted file mode 100644
index 0b16502652..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/db/uuid
+++ /dev/null
@@ -1,2 +0,0 @@
-1c80dd47-0c07-4207-8ee0-e60dd9d98853
-31d57ab1-759c-4129-a63d-898c774d96c9
diff --git a/manage_externals/test/repos/simple-ext.svn/db/write-lock b/manage_externals/test/repos/simple-ext.svn/db/write-lock
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/manage_externals/test/repos/simple-ext.svn/format b/manage_externals/test/repos/simple-ext.svn/format
deleted file mode 100644
index 7ed6ff82de..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/format
+++ /dev/null
@@ -1 +0,0 @@
-5
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/post-commit.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/post-commit.tmpl
deleted file mode 100755
index 988f041fa5..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/post-commit.tmpl
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/sh
-
-# POST-COMMIT HOOK
-#
-# The post-commit hook is invoked after a commit. Subversion runs
-# this hook by invoking a program (script, executable, binary, etc.)
-# named 'post-commit' (for which this file is a template) with the
-# following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] REV (the number of the revision just committed)
-# [3] TXN-NAME (the name of the transaction that has become REV)
-#
-# Because the commit has already completed and cannot be undone,
-# the exit code of the hook program is ignored. The hook program
-# can use the 'svnlook' utility to help it examine the
-# newly-committed tree.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'post-commit'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'post-commit' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'post-commit.bat' or 'post-commit.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-REV="$2"
-TXN_NAME="$3"
-
-mailer.py commit "$REPOS" "$REV" /path/to/mailer.conf
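The template spells out the post-commit contract: positional arguments REPOS-PATH, REV, and TXN-NAME, exit status ignored, empty environment, 'svnlook' as the read-only inspection tool. An equivalent minimal hook in Python, with the final print standing in for the template's placeholder mailer.py call:

    #!/usr/bin/env python3
    import subprocess
    import sys

    repos, rev = sys.argv[1], sys.argv[2]
    txn_name = sys.argv[3] if len(sys.argv) > 3 else ""  # newer servers pass it

    # Read-only inspection of the new revision, as the template recommends.
    log = subprocess.run(["svnlook", "log", "-r", rev, repos],
                         capture_output=True, text=True).stdout
    print(f"r{rev} committed to {repos}: {log.strip()}", file=sys.stderr)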
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/post-lock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/post-lock.tmpl
deleted file mode 100755
index 96f2165209..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/post-lock.tmpl
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/sh
-
-# POST-LOCK HOOK
-#
-# The post-lock hook is run after a path is locked. Subversion runs
-# this hook by invoking a program (script, executable, binary, etc.)
-# named 'post-lock' (for which this file is a template) with the
-# following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] USER (the user who created the lock)
-#
-# The paths that were just locked are passed to the hook via STDIN.
-#
-# Because the locks have already been created and cannot be undone,
-# the exit code of the hook program is ignored. The hook program
-# can use the 'svnlook' utility to examine the paths in the repository
-# but since the hook is invoked asynchronously the newly-created locks
-# may no longer be present.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'post-lock'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'post-lock' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'post-lock.bat' or 'post-lock.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-USER="$2"
-
-# Send email to interested parties, let them know a lock was created:
-mailer.py lock "$REPOS" "$USER" /path/to/mailer.conf
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/post-revprop-change.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/post-revprop-change.tmpl
deleted file mode 100755
index de1b914648..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/post-revprop-change.tmpl
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/sh
-
-# POST-REVPROP-CHANGE HOOK
-#
-# The post-revprop-change hook is invoked after a revision property
-# has been added, modified or deleted. Subversion runs this hook by
-# invoking a program (script, executable, binary, etc.) named
-# 'post-revprop-change' (for which this file is a template), with the
-# following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] REV (the revision that was tweaked)
-# [3] USER (the username of the person tweaking the property)
-# [4] PROPNAME (the property that was changed)
-# [5] ACTION (the property was 'A'dded, 'M'odified, or 'D'eleted)
-#
-# [STDIN] PROPVAL ** the old property value is passed via STDIN.
-#
-# Because the propchange has already completed and cannot be undone,
-# the exit code of the hook program is ignored. The hook program
-# can use the 'svnlook' utility to help it examine the
-# new property value.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'post-revprop-change'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'post-revprop-change' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'post-revprop-change.bat' or 'post-revprop-change.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-REV="$2"
-USER="$3"
-PROPNAME="$4"
-ACTION="$5"
-
-mailer.py propchange2 "$REPOS" "$REV" "$USER" "$PROPNAME" "$ACTION" /path/to/mailer.conf
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/post-unlock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/post-unlock.tmpl
deleted file mode 100755
index e33f793c25..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/post-unlock.tmpl
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/sh
-
-# POST-UNLOCK HOOK
-#
-# The post-unlock hook runs after a path is unlocked. Subversion runs
-# this hook by invoking a program (script, executable, binary, etc.)
-# named 'post-unlock' (for which this file is a template) with the
-# following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] USER (the user who destroyed the lock)
-#
-# The paths that were just unlocked are passed to the hook via STDIN.
-#
-# Because the lock has already been destroyed and cannot be undone,
-# the exit code of the hook program is ignored.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'post-unlock'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'post-unlock' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'post-unlock.bat' or 'post-unlock.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-USER="$2"
-
-# Send email to interested parties, let them know a lock was removed:
-mailer.py unlock "$REPOS" "$USER" /path/to/mailer.conf
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-commit.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-commit.tmpl
deleted file mode 100755
index 626e72300c..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-commit.tmpl
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/bin/sh
-
-# PRE-COMMIT HOOK
-#
-# The pre-commit hook is invoked before a Subversion txn is
-# committed. Subversion runs this hook by invoking a program
-# (script, executable, binary, etc.) named 'pre-commit' (for which
-# this file is a template), with the following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] TXN-NAME (the name of the txn about to be committed)
-#
-# [STDIN] LOCK-TOKENS ** the lock tokens are passed via STDIN.
-#
-# If STDIN contains the line "LOCK-TOKENS:\n" (the "\n" denotes a
-# single newline), the lines following it are the lock tokens for
-# this commit. The end of the list is marked by a line containing
-# only a newline character.
-#
-# Each lock token line consists of a URI-escaped path, followed
-# by the separator character '|', followed by the lock token string,
-# followed by a newline.
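-#
-# As a purely hypothetical illustration (values invented), STDIN
-# might contain:
-#
-#   LOCK-TOKENS:
-#   trunk%2Fdocs%2Fnotes.txt|opaquelocktoken:some-token-string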
-#
-# If the hook program exits with success, the txn is committed; but
-# if it exits with failure (non-zero), the txn is aborted, no commit
-# takes place, and STDERR is returned to the client. The hook
-# program can use the 'svnlook' utility to help it examine the txn.
-#
-# *** NOTE: THE HOOK PROGRAM MUST NOT MODIFY THE TXN, EXCEPT ***
-# *** FOR REVISION PROPERTIES (like svn:log or svn:author). ***
-#
-# This is why we recommend using the read-only 'svnlook' utility.
-# In the future, Subversion may enforce the rule that pre-commit
-# hooks should not modify the versioned data in txns, or else come
-# up with a mechanism to make it safe to do so (by informing the
-# committing client of the changes). However, right now neither
-# mechanism is implemented, so hook writers just have to be careful.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'pre-commit'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'pre-commit' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'pre-commit.bat' or 'pre-commit.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-TXN="$2"
-
-# Make sure that the log message contains some text.
-SVNLOOK=/opt/homebrew/Cellar/subversion/1.14.2_1/bin/svnlook
-$SVNLOOK log -t "$TXN" "$REPOS" | \
- grep "[a-zA-Z0-9]" > /dev/null || exit 1
-
-# Check that the author of this commit has the rights to perform
-# the commit on the files and directories being modified.
-commit-access-control.pl "$REPOS" "$TXN" commit-access-control.cfg || exit 1
-
-# All checks passed, so allow the commit.
-exit 0
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl
deleted file mode 100755
index 148582a689..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/bin/sh
-
-# PRE-LOCK HOOK
-#
-# The pre-lock hook is invoked before an exclusive lock is
-# created. Subversion runs this hook by invoking a program
-# (script, executable, binary, etc.) named 'pre-lock' (for which
-# this file is a template), with the following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] PATH (the path in the repository about to be locked)
-# [3] USER (the user creating the lock)
-# [4] COMMENT (the comment of the lock)
-# [5] STEAL-LOCK (1 if the user is trying to steal the lock, else 0)
-#
-# If the hook program outputs anything on stdout, the output string will
-# be used as the lock token for this lock operation. If you choose to use
-# this feature, you must guarantee the tokens generated are unique across
-# the repository each time.
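-#
-# For instance, a hook could emit a UUID-based token (a hypothetical
-# sketch; it assumes uuidgen is available to the hook):
-#
-#   echo "opaquelocktoken:$(uuidgen)"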
-#
-# If the hook program exits with success, the lock is created; but
-# if it exits with failure (non-zero), the lock action is aborted
-# and STDERR is returned to the client.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'pre-lock'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'pre-lock' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'pre-lock.bat' or 'pre-lock.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-PATH="$2"
-USER="$3"
-COMMENT="$4"
-STEAL="$5"
-
-# If a lock exists and is owned by a different person, don't allow it
-# to be stolen (e.g., with 'svn lock --force ...').
-
-# (Maybe this script could send email to the lock owner?)
-SVNLOOK=/opt/homebrew/Cellar/subversion/1.14.2_1/bin/svnlook
-GREP=/bin/grep
-SED=/bin/sed
-
-LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \
- $GREP '^Owner: ' | $SED 's/Owner: //'`
-
-# If we get no result from svnlook, there's no lock, allow the lock to
-# happen:
-if [ "$LOCK_OWNER" = "" ]; then
- exit 0
-fi
-
-# If the person locking matches the lock's owner, allow the lock to
-# happen:
-if [ "$LOCK_OWNER" = "$USER" ]; then
- exit 0
-fi
-
-# Otherwise, we've got an owner mismatch, so return failure:
-echo "Error: $PATH already locked by ${LOCK_OWNER}." 1>&2
-exit 1
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl
deleted file mode 100755
index 8b065d7c79..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/bin/sh
-
-# PRE-REVPROP-CHANGE HOOK
-#
-# The pre-revprop-change hook is invoked before a revision property
-# is added, modified or deleted. Subversion runs this hook by invoking
-# a program (script, executable, binary, etc.) named 'pre-revprop-change'
-# (for which this file is a template), with the following ordered
-# arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] REV (the revision being tweaked)
-# [3] USER (the username of the person tweaking the property)
-# [4] PROPNAME (the property being set on the revision)
-# [5] ACTION (the property is being 'A'dded, 'M'odified, or 'D'eleted)
-#
-# [STDIN] PROPVAL ** the new property value is passed via STDIN.
-#
-# If the hook program exits with success, the propchange happens; but
-# if it exits with failure (non-zero), the propchange doesn't happen.
-# The hook program can use the 'svnlook' utility to examine the
-# existing value of the revision property.
-#
-# WARNING: unlike other hooks, this hook MUST exist for revision
-# properties to be changed. If the hook does not exist, Subversion
-# will behave as if the hook were present, but failed. The reason
-# for this is that revision properties are UNVERSIONED, meaning that
-# a successful propchange is destructive; the old value is gone
-# forever. We recommend the hook back up the old value somewhere.
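-#
-# For example, the hook could append the old value to a backup log
-# before allowing the change (a hypothetical sketch; the backup path
-# is arbitrary):
-#
-#   svnlook propget --revprop -r "$REV" "$REPOS" "$PROPNAME" \
-#     >> /var/backup/svn-revprops.log 2>/dev/null || true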
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'pre-revprop-change'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'pre-revprop-change' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'pre-revprop-change.bat' or 'pre-revprop-change.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-REV="$2"
-USER="$3"
-PROPNAME="$4"
-ACTION="$5"
-
-if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
-
-echo "Changing revision properties other than svn:log is prohibited" >&2
-exit 1
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl
deleted file mode 100755
index 9ba99d071b..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/sh
-
-# PRE-UNLOCK HOOK
-#
-# The pre-unlock hook is invoked before an exclusive lock is
-# destroyed. Subversion runs this hook by invoking a program
-# (script, executable, binary, etc.) named 'pre-unlock' (for which
-# this file is a template), with the following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] PATH (the path in the repository about to be unlocked)
-# [3] USER (the user destroying the lock)
-# [4] TOKEN (the lock token to be destroyed)
-# [5] BREAK-UNLOCK (1 if the user is breaking the lock, else 0)
-#
-# If the hook program exits with success, the lock is destroyed; but
-# if it exits with failure (non-zero), the unlock action is aborted
-# and STDERR is returned to the client.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'pre-unlock'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'pre-unlock' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'pre-unlock.bat' or 'pre-unlock.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-PATH="$2"
-USER="$3"
-TOKEN="$4"
-BREAK="$5"
-
-# If a lock is owned by a different person, don't allow it be broken.
-# (Maybe this script could send email to the lock owner?)
-
-SVNLOOK=/opt/homebrew/Cellar/subversion/1.14.2_1/bin/svnlook
-GREP=/bin/grep
-SED=/bin/sed
-
-LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \
- $GREP '^Owner: ' | $SED 's/Owner: //'`
-
-# If we get no result from svnlook, there's no lock, return success:
-if [ "$LOCK_OWNER" = "" ]; then
- exit 0
-fi
-
-# If the person unlocking matches the lock's owner, return success:
-if [ "$LOCK_OWNER" = "$USER" ]; then
- exit 0
-fi
-
-# Otherwise, we've got an owner mismatch, so return failure:
-echo "Error: $PATH locked by ${LOCK_OWNER}." 1>&2
-exit 1
diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl
deleted file mode 100755
index 1395e8315a..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/bin/sh
-
-# START-COMMIT HOOK
-#
-# The start-commit hook is invoked immediately after a Subversion txn is
-# created and populated with initial revprops in the process of doing a
-# commit. Subversion runs this hook by invoking a program (script,
-# executable, binary, etc.) named 'start-commit' (for which this file
-# is a template) with the following ordered arguments:
-#
-# [1] REPOS-PATH (the path to this repository)
-# [2] USER (the authenticated user attempting to commit)
-# [3] CAPABILITIES (a colon-separated list of capabilities reported
-# by the client; see note below)
-# [4] TXN-NAME (the name of the commit txn just created)
-#
-# Note: The CAPABILITIES parameter is new in Subversion 1.5, and 1.5
-# clients will typically report at least the "mergeinfo" capability.
-# If there are other capabilities, then the list is colon-separated,
-# e.g.: "mergeinfo:some-other-capability" (the order is undefined).
-#
-# The list is self-reported by the client. Therefore, you should not
-# make security assumptions based on the capabilities list, nor should
-# you assume that clients reliably report every capability they have.
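-#
-# For example, a hook could require the mergeinfo capability (a
-# hypothetical sketch, not used by the script below):
-#
-#   CAPABILITIES="$3"
-#   case ":$CAPABILITIES:" in
-#     *:mergeinfo:*) ;;
-#     *) echo "Client must support mergeinfo." 1>&2; exit 1 ;;
-#   esac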
-#
-# Note: The TXN-NAME parameter is new in Subversion 1.8. Prior to version
-# 1.8, the start-commit hook was invoked before the commit txn was even
-# created, so the ability to inspect the commit txn and its metadata from
-# within the start-commit hook was not possible.
-#
-# If the hook program exits with success, the commit continues; but
-# if it exits with failure (non-zero), the commit is stopped before
-# a Subversion txn is created, and STDERR is returned to the client.
-#
-# The default working directory for the invocation is undefined, so
-# the program should set one explicitly if it cares.
-#
-# On a Unix system, the normal procedure is to have 'start-commit'
-# invoke other programs to do the real work, though it may do the
-# work itself too.
-#
-# Note that 'start-commit' must be executable by the user(s) who will
-# invoke it (typically the user httpd runs as), and that user must
-# have filesystem-level permission to access the repository.
-#
-# On a Windows system, you should name the hook program
-# 'start-commit.bat' or 'start-commit.exe',
-# but the basic idea is the same.
-#
-# The hook program runs in an empty environment, unless the server is
-# explicitly configured otherwise. For example, a common problem is for
-# the PATH environment variable to not be set to its usual value, so
-# that subprograms fail to launch unless invoked via absolute path.
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-USER="$2"
-
-commit-allower.pl --repository "$REPOS" --user "$USER" || exit 1
-special-auth-check.py --user "$USER" --auth-level 3 || exit 1
-
-# All checks passed, so allow the commit.
-exit 0
diff --git a/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock b/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock
deleted file mode 100644
index 20dd6369be..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock
+++ /dev/null
@@ -1,3 +0,0 @@
-This file is not used by Subversion 1.3.x or later.
-However, its existence is required for compatibility with
-Subversion 1.2.x or earlier.
diff --git a/manage_externals/test/repos/simple-ext.svn/locks/db.lock b/manage_externals/test/repos/simple-ext.svn/locks/db.lock
deleted file mode 100644
index 20dd6369be..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/locks/db.lock
+++ /dev/null
@@ -1,3 +0,0 @@
-This file is not used by Subversion 1.3.x or later.
-However, its existence is required for compatibility with
-Subversion 1.2.x or earlier.
diff --git a/manage_externals/test/requirements.txt b/manage_externals/test/requirements.txt
deleted file mode 100644
index d66f6f1e67..0000000000
--- a/manage_externals/test/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-pylint>=1.7.0
-autopep8>=1.3.0
-coverage>=4.4.0
-coveralls>=1.2.0
-sphinx>=1.6.0
diff --git a/manage_externals/test/test_sys_checkout.py b/manage_externals/test/test_sys_checkout.py
deleted file mode 100755
index 664160dc99..0000000000
--- a/manage_externals/test/test_sys_checkout.py
+++ /dev/null
@@ -1,1871 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Terminology:
- * 'container': a repo that has externals
- * 'simple': a repo that has no externals, but is referenced as an external by another repo.
- * 'mixed': a repo that both has externals and is referenced as an external by another repo.
-
- * 'clean': the local repo matches the version in the externals and has no local modifications.
- * 'empty': the external isn't checked out at all.
-
-Note: this script assumes the paths to the manic and
-checkout_externals modules are already in the python path. This is
-usually handled by the makefile. If you call it directly, you may need
-to adjust your path.
-
-NOTE(bja, 2017-11) If a test fails, we want to keep the repo for that
-test. But the tests will keep running, so we need a unique name. Also,
-tearDown is always called after each test. I haven't figured out how
-to determine if an assertion failed and whether it is safe to clean up
-the test repos.
-
-So the solution is:
-
-* assign a unique id to each test repo.
-
-* never cleanup during the run.
-
-* Erase any existing repos at the beginning of the module in
-setUpModule.
-"""
-
-# NOTE(bja, 2017-11) pylint complains that the module is too big, but
-# I'm still working on how to break up the tests and still have the
-# temporary directory be preserved....
-# pylint: disable=too-many-lines
-
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import logging
-import os
-import os.path
-import shutil
-import unittest
-
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM
-from manic.externals_description import git_submodule_status
-from manic.externals_status import ExternalStatus
-from manic.repository_git import GitRepository
-from manic.utils import printlog, execute_subprocess
-from manic.global_constants import LOCAL_PATH_INDICATOR, VERBOSITY_DEFAULT
-from manic.global_constants import LOG_FILE_NAME
-from manic import checkout
-
-# ConfigParser was renamed to configparser in python3. In python2,
-# ConfigParser returns byte strings, str, instead of unicode. We need
-# unicode to be compatible with the xml and json parsers and python3.
-try:
- # python2
- from ConfigParser import SafeConfigParser as config_parser
-except ImportError:
- # python3
- from configparser import ConfigParser as config_parser
-
-# ---------------------------------------------------------------------
-#
-# Global constants
-#
-# ---------------------------------------------------------------------
-
-
-# Module-wide root directory for all the per-test subdirs we'll create on
-# the fly (which are placed under wherever $CWD is when the test runs).
-# Set by setUpModule().
-module_tmp_root_dir = None
-TMP_REPO_DIR_NAME = 'tmp' # subdir under $CWD
-
-# subdir under test/ that holds all of our checked-in repositories (which we
-# will clone for these tests).
-BARE_REPO_ROOT_NAME = 'repos'
-
-# Environment var referenced by checked-in externals file in mixed-cont-ext.git,
-# which should be pointed to the fully-resolved BARE_REPO_ROOT_NAME directory.
-# We explicitly clear this after every test, via tearDown().
-MIXED_CONT_EXT_ROOT_ENV_VAR = 'MANIC_TEST_BARE_REPO_ROOT'
-
-# Subdirs under bare repo root, each holding a repository. For more info
-# on the contents of these repositories, see test/repos/README.md. In these
-# tests the 'parent' repos are cloned as a starting point, whereas the 'child'
-# repos are checked out when the tests run checkout_externals.
-CONTAINER_REPO = 'container.git' # Parent repo
-SIMPLE_REPO = 'simple-ext.git' # Child repo
-SIMPLE_FORK_REPO = 'simple-ext-fork.git' # Child repo
-MIXED_REPO = 'mixed-cont-ext.git' # Both parent and child
-SVN_TEST_REPO = 'simple-ext.svn' # Subversion repository
-
-# Standard (arbitrary) external names for test configs
-TAG_SECTION = 'simp_tag'
-BRANCH_SECTION = 'simp_branch'
-HASH_SECTION = 'simp_hash'
-
-# All the configs we construct check out their externals into these local paths.
-EXTERNALS_PATH = 'externals'
-SUB_EXTERNALS_PATH = 'src' # For mixed test repos.
-
-# For testing behavior with '.' instead of an explicit path.
-SIMPLE_LOCAL_ONLY_NAME = '.'
-
-# Externals files.
-CFG_NAME = 'externals.cfg' # We construct this on a per-test basis.
-CFG_SUB_NAME = 'sub-externals.cfg' # Already exists in mixed-cont-ext repo.
-
-# Arbitrary text file in all the test repos.
-README_NAME = 'readme.txt'
-
-# Branch that exists in both the simple and simple-fork repos.
-REMOTE_BRANCH_FEATURE2 = 'feature2'
-
-# Disable too-many-public-methods error
-# pylint: disable=R0904
-
-def setUpModule(): # pylint: disable=C0103
- """Setup for all tests in this module. It is called once per module!
- """
- logging.basicConfig(filename=LOG_FILE_NAME,
- format='%(levelname)s : %(asctime)s : %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S',
- level=logging.DEBUG)
- repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME)
- repo_root = os.path.abspath(repo_root)
- # delete if it exists from previous runs
- try:
- shutil.rmtree(repo_root)
- except BaseException:
- pass
- # create clean dir for this run
- os.mkdir(repo_root)
-
- # Make available to all tests in this file.
- global module_tmp_root_dir
-    assert module_tmp_root_dir is None, module_tmp_root_dir
- module_tmp_root_dir = repo_root
-
-
-class RepoUtils(object):
- """Convenience methods for interacting with git repos."""
- @staticmethod
- def create_branch(repo_base_dir, external_name, branch, with_commit=False):
- """Create branch and optionally (with_commit) add a single commit.
- """
- # pylint: disable=R0913
- cwd = os.getcwd()
- repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name)
- os.chdir(repo_root)
- cmd = ['git', 'checkout', '-b', branch, ]
- execute_subprocess(cmd)
- if with_commit:
- msg = 'start work on {0}'.format(branch)
- with open(README_NAME, 'a') as handle:
- handle.write(msg)
- cmd = ['git', 'add', README_NAME, ]
- execute_subprocess(cmd)
- cmd = ['git', 'commit', '-m', msg, ]
- execute_subprocess(cmd)
- os.chdir(cwd)
-
- @staticmethod
- def create_commit(repo_base_dir, external_name):
- """Make a commit to the given external.
-
- This is used to test sync state changes from local commits on
- detached heads and tracking branches.
- """
- cwd = os.getcwd()
- repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name)
- os.chdir(repo_root)
-
- msg = 'work on great new feature!'
- with open(README_NAME, 'a') as handle:
- handle.write(msg)
- cmd = ['git', 'add', README_NAME, ]
- execute_subprocess(cmd)
- cmd = ['git', 'commit', '-m', msg, ]
- execute_subprocess(cmd)
- os.chdir(cwd)
-
- @staticmethod
- def clone_test_repo(bare_root, test_id, parent_repo_name, dest_dir_in):
- """Clone repo at / into dest_dir_in or local per-test-subdir.
-
- Returns output dir.
- """
- parent_repo_dir = os.path.join(bare_root, parent_repo_name)
- if dest_dir_in is None:
- # create unique subdir for this test
- test_dir_name = test_id
- print("Test repository name: {0}".format(test_dir_name))
- dest_dir = os.path.join(module_tmp_root_dir, test_dir_name)
- else:
- dest_dir = dest_dir_in
-
- # pylint: disable=W0212
- GitRepository._git_clone(parent_repo_dir, dest_dir, VERBOSITY_DEFAULT)
- return dest_dir
-
- @staticmethod
- def add_file_to_repo(under_test_dir, filename, tracked):
- """Add a file to the repository so we can put it into a dirty state
-
- """
- cwd = os.getcwd()
- os.chdir(under_test_dir)
- with open(filename, 'w') as tmp:
- tmp.write('Hello, world!')
-
- if tracked:
- # NOTE(bja, 2018-01) brittle hack to obtain repo dir and
- # file name
- path_data = filename.split('/')
- repo_dir = os.path.join(path_data[0], path_data[1])
- os.chdir(repo_dir)
- tracked_file = path_data[2]
- cmd = ['git', 'add', tracked_file]
- execute_subprocess(cmd)
-
- os.chdir(cwd)
-
-class GenerateExternalsDescriptionCfgV1(object):
- """Building blocks to create ExternalsDescriptionCfgV1 files.
-
-    Basic usage: create_config(), then multiple create_*() calls, then
-    write_config().
- Optionally after that: write_with_*().
- """
-
- def __init__(self, bare_root):
- self._schema_version = '1.1.0'
- self._config = None
-
- # directory where we have test repositories (which we will clone for
- # tests)
- self._bare_root = bare_root
-
- def write_config(self, dest_dir, filename=CFG_NAME):
- """Write self._config to disk
-
- """
- dest_path = os.path.join(dest_dir, filename)
- with open(dest_path, 'w') as configfile:
- self._config.write(configfile)
-
- def create_config(self):
- """Create an config object and add the required metadata section
-
- """
- self._config = config_parser()
- self.create_metadata()
-
- def create_metadata(self):
- """Create the metadata section of the config file
- """
- self._config.add_section(DESCRIPTION_SECTION)
-
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM,
- self._schema_version)
-
- def url_for_repo_path(self, repo_path, repo_path_abs=None):
- if repo_path_abs is not None:
- return repo_path_abs
- else:
- return os.path.join(self._bare_root, repo_path)
-
- def create_section(self, repo_path, name, tag='', branch='',
- ref_hash='', required=True, path=EXTERNALS_PATH,
- sub_externals='', repo_path_abs=None, from_submodule=False,
- sparse='', nested=False):
- # pylint: disable=too-many-branches
- """Create a config ExternalsDescription section with the given name.
-
- Autofills some items and handles some optional items.
-
- repo_path_abs overrides repo_path (which is relative to the bare repo)
-        path is the local directory within the container to check out into.
- """
- # pylint: disable=R0913
- self._config.add_section(name)
- if not from_submodule:
- if nested:
- self._config.set(name, ExternalsDescription.PATH, path)
- else:
- self._config.set(name, ExternalsDescription.PATH,
- os.path.join(path, name))
-
- self._config.set(name, ExternalsDescription.PROTOCOL,
- ExternalsDescription.PROTOCOL_GIT)
-
- # from_submodules is incompatible with some other options, turn them off
- if (from_submodule and
- ((repo_path_abs is not None) or tag or ref_hash or branch)):
- printlog('create_section: "from_submodule" is incompatible with '
- '"repo_url", "tag", "hash", and "branch" options;\n'
- 'Ignoring those options for {}'.format(name))
- repo_url = None
- tag = ''
- ref_hash = ''
- branch = ''
-
- repo_url = self.url_for_repo_path(repo_path, repo_path_abs)
-
- if not from_submodule:
- self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
- self._config.set(name, ExternalsDescription.REQUIRED, str(required))
-
- if tag:
- self._config.set(name, ExternalsDescription.TAG, tag)
-
- if branch:
- self._config.set(name, ExternalsDescription.BRANCH, branch)
-
- if ref_hash:
- self._config.set(name, ExternalsDescription.HASH, ref_hash)
-
- if sub_externals:
- self._config.set(name, ExternalsDescription.EXTERNALS,
- sub_externals)
-
- if sparse:
- self._config.set(name, ExternalsDescription.SPARSE, sparse)
-
- if from_submodule:
- self._config.set(name, ExternalsDescription.SUBMODULE, "True")
-
- def create_section_reference_to_subexternal(self, name):
- """Just a reference to another externals file.
-
- """
- # pylint: disable=R0913
- self._config.add_section(name)
- self._config.set(name, ExternalsDescription.PATH, LOCAL_PATH_INDICATOR)
-
- self._config.set(name, ExternalsDescription.PROTOCOL,
- ExternalsDescription.PROTOCOL_EXTERNALS_ONLY)
-
- self._config.set(name, ExternalsDescription.REPO_URL,
- LOCAL_PATH_INDICATOR)
-
- self._config.set(name, ExternalsDescription.REQUIRED, str(True))
-
- self._config.set(name, ExternalsDescription.EXTERNALS, CFG_SUB_NAME)
-
- def create_svn_external(self, name, url, tag='', branch=''):
- """Create a config section for an svn repository.
-
- """
- self._config.add_section(name)
- self._config.set(name, ExternalsDescription.PATH,
- os.path.join(EXTERNALS_PATH, name))
-
- self._config.set(name, ExternalsDescription.PROTOCOL,
- ExternalsDescription.PROTOCOL_SVN)
-
- self._config.set(name, ExternalsDescription.REPO_URL, url)
-
- self._config.set(name, ExternalsDescription.REQUIRED, str(True))
-
- if tag:
- self._config.set(name, ExternalsDescription.TAG, tag)
-
- if branch:
- self._config.set(name, ExternalsDescription.BRANCH, branch)
-
- def write_with_git_branch(self, dest_dir, name, branch, new_remote_repo_path=None):
- """Update fields in our config and write it to disk.
-
- name is the key of the ExternalsDescription in self._config to update.
- """
- # pylint: disable=R0913
- self._config.set(name, ExternalsDescription.BRANCH, branch)
-
- if new_remote_repo_path:
- if new_remote_repo_path == SIMPLE_LOCAL_ONLY_NAME:
- repo_url = SIMPLE_LOCAL_ONLY_NAME
- else:
- repo_url = os.path.join(self._bare_root, new_remote_repo_path)
- self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
- try:
- # remove the tag if it existed
- self._config.remove_option(name, ExternalsDescription.TAG)
- except BaseException:
- pass
-
- self.write_config(dest_dir)
-
- def write_with_svn_branch(self, dest_dir, name, branch):
- """Update a repository branch, and potentially the remote.
- """
- # pylint: disable=R0913
- self._config.set(name, ExternalsDescription.BRANCH, branch)
-
- try:
- # remove the tag if it existed
- self._config.remove_option(name, ExternalsDescription.TAG)
- except BaseException:
- pass
-
- self.write_config(dest_dir)
-
- def write_with_tag_and_remote_repo(self, dest_dir, name, tag, new_remote_repo_path,
- remove_branch=True):
- """Update a repository tag and the remote.
-
- NOTE(bja, 2017-11) remove_branch=False should result in an
- overspecified external with both a branch and tag. This is
- used for error condition testing.
-
- """
- # pylint: disable=R0913
- self._config.set(name, ExternalsDescription.TAG, tag)
-
- if new_remote_repo_path:
- repo_url = os.path.join(self._bare_root, new_remote_repo_path)
- self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
- try:
- # remove the branch if it existed
- if remove_branch:
- self._config.remove_option(name, ExternalsDescription.BRANCH)
- except BaseException:
- pass
-
- self.write_config(dest_dir)
-
- def write_without_branch_tag(self, dest_dir, name):
- """Update a repository protocol, and potentially the remote
- """
- # pylint: disable=R0913
- try:
- # remove the branch if it existed
- self._config.remove_option(name, ExternalsDescription.BRANCH)
- except BaseException:
- pass
-
- try:
- # remove the tag if it existed
- self._config.remove_option(name, ExternalsDescription.TAG)
- except BaseException:
- pass
-
- self.write_config(dest_dir)
-
- def write_without_repo_url(self, dest_dir, name):
- """Update a repository protocol, and potentially the remote
- """
- # pylint: disable=R0913
- try:
- # remove the repo url if it existed
- self._config.remove_option(name, ExternalsDescription.REPO_URL)
- except BaseException:
- pass
-
- self.write_config(dest_dir)
-
- def write_with_protocol(self, dest_dir, name, protocol, repo_path=None):
- """Update a repository protocol, and potentially the remote
- """
- # pylint: disable=R0913
- self._config.set(name, ExternalsDescription.PROTOCOL, protocol)
-
- if repo_path:
- repo_url = os.path.join(self._bare_root, repo_path)
- self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
- self.write_config(dest_dir)
-
-
-def _execute_checkout_in_dir(dirname, args, debug_env=''):
- """Execute the checkout command in the appropriate repo dir with the
- specified additional args.
-
- args should be a list of strings.
-    debug_env should be a string of the form 'FOO=bar' or the empty string.
-
- Note that we are calling the command line processing and main
- routines and not using a subprocess call so that we get code
- coverage results! Note this means that environment variables are passed
- to checkout_externals via os.environ; debug_env is just used to aid
- manual reproducibility of a given call.
-
- Returns (overall_status, tree_status)
- where overall_status is 0 for success, nonzero otherwise.
- and tree_status is set if --status was passed in, None otherwise.
-
- Note this command executes the checkout command, it doesn't
- necessarily do any checking out (e.g. if --status is passed in).
- """
- cwd = os.getcwd()
-
- # Construct a command line for reproducibility; this command is not
- # actually executed in the test.
- os.chdir(dirname)
- cmdline = ['--externals', CFG_NAME, ]
- cmdline += args
- manual_cmd = ('Running equivalent of:\n'
- 'pushd {dirname}; '
- '{debug_env} /path/to/checkout_externals {args}'.format(
- dirname=dirname, debug_env=debug_env,
- args=' '.join(cmdline)))
- printlog(manual_cmd)
- options = checkout.commandline_arguments(cmdline)
- overall_status, tree_status = checkout.main(options)
- os.chdir(cwd)
- return overall_status, tree_status
-
-class BaseTestSysCheckout(unittest.TestCase):
- """Base class of reusable systems level test setup for
- checkout_externals
-
- """
- # NOTE(bja, 2017-11) pylint complains about long method names, but
- # it is hard to differentiate tests without making them more
- # cryptic.
- # pylint: disable=invalid-name
-
- # Command-line args for checkout_externals, used in execute_checkout_in_dir()
- status_args = ['--status']
- checkout_args = []
- optional_args = ['--optional']
- verbose_args = ['--status', '--verbose']
-
- def setUp(self):
- """Setup for all individual checkout_externals tests
- """
- # directory we want to return to after the test system and
- # checkout_externals are done cd'ing all over the place.
- self._return_dir = os.getcwd()
-
- self._test_id = self.id().split('.')[-1]
-
- # find root
- if os.path.exists(os.path.join(os.getcwd(), 'checkout_externals')):
- root_dir = os.path.abspath(os.getcwd())
- else:
- # maybe we are in a subdir, search up
- root_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
- while os.path.basename(root_dir):
- if os.path.exists(os.path.join(root_dir, 'checkout_externals')):
- break
- root_dir = os.path.dirname(root_dir)
-
- if not os.path.exists(os.path.join(root_dir, 'checkout_externals')):
- raise RuntimeError('Cannot find checkout_externals')
-
- # path to the executable
- self._checkout = os.path.join(root_dir, 'checkout_externals')
-
- # directory where we have test repositories (which we will clone for
- # tests)
- self._bare_root = os.path.abspath(
- os.path.join(root_dir, 'test', BARE_REPO_ROOT_NAME))
-
- # set the input file generator
- self._generator = GenerateExternalsDescriptionCfgV1(self._bare_root)
- # set the input file generator for secondary externals
- self._sub_generator = GenerateExternalsDescriptionCfgV1(self._bare_root)
-
- def tearDown(self):
- """Tear down for individual tests
- """
- # return to our common starting point
- os.chdir(self._return_dir)
-
- # (in case this was set) Don't pollute environment of other tests.
- os.environ.pop(MIXED_CONT_EXT_ROOT_ENV_VAR,
- None) # Don't care if key wasn't set.
-
- def clone_test_repo(self, parent_repo_name, dest_dir_in=None):
- """Clones repo under self._bare_root"""
- return RepoUtils.clone_test_repo(self._bare_root, self._test_id,
- parent_repo_name, dest_dir_in)
-
- def execute_checkout_in_dir(self, dirname, args, debug_env=''):
- overall_status, tree_status = _execute_checkout_in_dir(dirname, args,
- debug_env=debug_env)
- self.assertEqual(overall_status, 0)
- return tree_status
-
- def execute_checkout_with_status(self, dirname, args, debug_env=''):
- """Calls checkout a second time to get status if needed."""
- tree_status = self.execute_checkout_in_dir(
- dirname, args, debug_env=debug_env)
- if tree_status is None:
- tree_status = self.execute_checkout_in_dir(dirname,
- self.status_args,
- debug_env=debug_env)
- self.assertNotEqual(tree_status, None)
- return tree_status
-
- def _check_sync_clean(self, ext_status, expected_sync_state,
- expected_clean_state):
- self.assertEqual(ext_status.sync_state, expected_sync_state)
- self.assertEqual(ext_status.clean_state, expected_clean_state)
-
- @staticmethod
- def _external_path(section_name, base_path=EXTERNALS_PATH):
- return './{0}/{1}'.format(base_path, section_name)
-
- def _check_file_exists(self, repo_dir, pathname):
- "Check that exists in "
- self.assertTrue(os.path.exists(os.path.join(repo_dir, pathname)))
-
- def _check_file_absent(self, repo_dir, pathname):
- "Check that does not exist in "
- self.assertFalse(os.path.exists(os.path.join(repo_dir, pathname)))
-
-
-class TestSysCheckout(BaseTestSysCheckout):
- """Run systems level tests of checkout_externals
- """
- # NOTE(bja, 2017-11) pylint complains about long method names, but
- # it is hard to differentiate tests without making them more
- # cryptic.
- # pylint: disable=invalid-name
-
- # ----------------------------------------------------------------
- #
- # Run systems tests
- #
- # ----------------------------------------------------------------
- def test_required_bytag(self):
- """Check out a required external pointing to a git tag."""
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
- self._generator.write_config(cloned_repo_dir)
-
- # externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- local_path_rel = self._external_path(TAG_SECTION)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
- self.assertFalse(os.path.exists(local_path_abs))
-
- # after checkout, the external is 'clean' aka at the correct version.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # Actually checked out the desired repo.
- self.assertEqual('origin', GitRepository._remote_name_for_url(
- # Which url to look up
- self._generator.url_for_repo_path(SIMPLE_REPO),
- # Which directory has the local checked-out repo.
- dirname=local_path_abs))
-
- # Actually checked out the desired tag.
- (tag_found, tag_name) = GitRepository._git_current_tag(local_path_abs)
- self.assertEqual(tag_name, 'tag1')
-
- # Check existence of some simp_tag files
- tag_path = os.path.join('externals', TAG_SECTION)
- self._check_file_exists(cloned_repo_dir,
- os.path.join(tag_path, README_NAME))
- # Subrepo should not exist (not referenced by configs).
- self._check_file_absent(cloned_repo_dir, os.path.join(tag_path,
- 'simple_subdir',
- 'subdir_file.txt'))
-
- def test_required_bybranch(self):
- """Check out a required external pointing to a git branch."""
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
- # externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- local_path_rel = self._external_path(BRANCH_SECTION)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
- self.assertFalse(os.path.exists(local_path_abs))
-
- # after checkout, the external is 'clean' aka at the correct version.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self.assertTrue(os.path.exists(local_path_abs))
-
- # Actually checked out the desired repo.
- self.assertEqual('origin', GitRepository._remote_name_for_url(
- # Which url to look up
- self._generator.url_for_repo_path(SIMPLE_REPO),
- # Which directory has the local checked-out repo.
- dirname=local_path_abs))
-
- # Actually checked out the desired branch.
- (branch_found, branch_name) = GitRepository._git_current_remote_branch(
- local_path_abs)
-        self.assertEqual(branch_name, 'origin/' + REMOTE_BRANCH_FEATURE2)
-
- def test_required_byhash(self):
- """Check out a required external pointing to a git hash."""
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, HASH_SECTION,
- ref_hash='60b1cc1a38d63')
- self._generator.write_config(cloned_repo_dir)
-
- # externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- local_path_rel = self._external_path(HASH_SECTION)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
- self.assertFalse(os.path.exists(local_path_abs))
-
- # after checkout, the externals are 'clean' aka at their correct version.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[local_path_rel],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # Actually checked out the desired repo.
- self.assertEqual('origin', GitRepository._remote_name_for_url(
- # Which url to look up
- self._generator.url_for_repo_path(SIMPLE_REPO),
- # Which directory has the local checked-out repo.
- dirname=local_path_abs))
-
- # Actually checked out the desired hash.
- (hash_found, hash_name) = GitRepository._git_current_hash(
- local_path_abs)
- self.assertTrue(hash_name.startswith('60b1cc1a38d63'),
- msg=hash_name)
-
- def test_container_nested_required(self):
- """Verify that a container with nested subrepos generates the correct initial status.
-        Tests all possible nesting-order permutations.
- """
- # Output subdirs for each of the externals, to test that one external can be
- # checked out in a subdir of another.
- NESTED_SUBDIR = ['./fred', './fred/wilma', './fred/wilma/barney']
-
- # Assert that each type of external (e.g. tag vs branch) can be at any parent level
- # (e.g. child/parent/grandparent).
- orders = [[0, 1, 2], [1, 2, 0], [2, 0, 1],
- [0, 2, 1], [2, 1, 0], [1, 0, 2]]
- for n, order in enumerate(orders):
- dest_dir = os.path.join(module_tmp_root_dir, self._test_id,
- "test"+str(n))
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO,
- dest_dir_in=dest_dir)
- self._generator.create_config()
- # We happen to check out each section via a different reference (tag/branch/hash) but
- # those don't really matter, we just need to check out three repos into a nested set of
- # directories.
- self._generator.create_section(
- SIMPLE_REPO, TAG_SECTION, nested=True,
- tag='tag1', path=NESTED_SUBDIR[order[0]])
- self._generator.create_section(
- SIMPLE_REPO, BRANCH_SECTION, nested=True,
- branch=REMOTE_BRANCH_FEATURE2, path=NESTED_SUBDIR[order[1]])
- self._generator.create_section(
- SIMPLE_REPO, HASH_SECTION, nested=True,
- ref_hash='60b1cc1a38d63', path=NESTED_SUBDIR[order[2]])
- self._generator.write_config(cloned_repo_dir)
-
- # all externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
-
- # after checkout, all the repos are 'clean'.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_simple_optional(self):
- """Verify that container with an optional simple subrepos generates
- the correct initial status.
-
- """
- # create repo and externals config.
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, 'simp_req',
- tag='tag1')
-
- self._generator.create_section(SIMPLE_REPO, 'simp_opt',
- tag='tag1', required=False)
-
- self._generator.write_config(cloned_repo_dir)
-
- # all externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- req_status = tree[self._external_path('simp_req')]
- self._check_sync_clean(req_status,
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
- opt_status = tree[self._external_path('simp_opt')]
- self._check_sync_clean(opt_status,
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
- # after checkout, required external is clean, optional is still empty.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- req_status = tree[self._external_path('simp_req')]
- self._check_sync_clean(req_status,
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
- opt_status = tree[self._external_path('simp_opt')]
- self._check_sync_clean(opt_status,
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
- # after checking out optionals, the optional external is also clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.optional_args)
- req_status = tree[self._external_path('simp_req')]
- self._check_sync_clean(req_status,
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
- opt_status = tree[self._external_path('simp_opt')]
- self._check_sync_clean(opt_status,
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
- def test_container_simple_verbose(self):
- """Verify that verbose status matches non-verbose.
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
- self._generator.write_config(cloned_repo_dir)
-
- # after checkout, all externals should be 'clean'.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # 'Verbose' status should tell the same story.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.verbose_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_simple_dirty(self):
- """Verify that a container with a new tracked file is marked dirty.
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
- self._generator.write_config(cloned_repo_dir)
-
- # checkout, should start out clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # add a tracked file to the simp_tag external, should be dirty.
- RepoUtils.add_file_to_repo(cloned_repo_dir,
- 'externals/{0}/tmp.txt'.format(TAG_SECTION),
- tracked=True)
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.DIRTY)
-
- # Re-checkout; simp_tag should still be dirty.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.DIRTY)
-
- def test_container_simple_untracked(self):
- """Verify that a container with simple subrepos and a untracked files
- is not considered 'dirty' and will attempt an update.
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
- self._generator.write_config(cloned_repo_dir)
-
- # checkout, should start out clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # add an untracked file to the simp_tag external, should stay clean.
- RepoUtils.add_file_to_repo(cloned_repo_dir,
- 'externals/{0}/tmp.txt'.format(TAG_SECTION),
- tracked=False)
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # After checkout, the external should still be 'clean'.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_simple_detached_sync(self):
- """Verify that a container with simple subrepos generates the correct
- out of sync status when making commits from a detached head
- state.
-
- For more info about 'detached head' state: https://www.cloudbees.com/blog/git-detached-head
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
-
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
-
- self._generator.create_section(SIMPLE_REPO, 'simp_hash',
- ref_hash='60b1cc1a38d63')
-
- self._generator.write_config(cloned_repo_dir)
-
- # externals start out 'empty' aka not checked out.
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
-
- # checkout
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # Commit on top of the tag and hash (creating the detached head state in those two
- # externals' repos)
-        # The branch commit does not create the detached head state, but is
-        # included here for completeness.
- RepoUtils.create_commit(cloned_repo_dir, TAG_SECTION)
- RepoUtils.create_commit(cloned_repo_dir, HASH_SECTION)
- RepoUtils.create_commit(cloned_repo_dir, BRANCH_SECTION)
-
-        # sync status of all three should be 'modified' (the local commits
-        # moved HEAD away from the configured version); clean status is 'ok'
-        # (no uncommitted changes in the working trees).
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
-
- # after checkout, all externals should be totally clean (no uncommitted changes,
- # and matches externals version).
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_remote_branch(self):
- """Verify that a container with remote branch change works
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
- # initial checkout
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # update the branch external to point to a different remote with the same branch,
- # then simp_branch should be out of sync
- self._generator.write_with_git_branch(cloned_repo_dir,
- name=BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2,
- new_remote_repo_path=SIMPLE_FORK_REPO)
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
-
- # checkout new externals, now simp_branch should be clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_remote_tag_same_branch(self):
- """Verify that a container with remote tag change works. The new tag
- should not be in the original repo, only the new remote
- fork. The new tag is automatically fetched because it is on
- the branch.
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
- # initial checkout
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # update the config file to point to a different remote with
- # the new tag replacing the old branch. Tag MUST NOT be in the original
- # repo! status of simp_branch should then be out of sync
- self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
- tag='forked-feature-v1',
- new_remote_repo_path=SIMPLE_FORK_REPO)
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.status_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
-
- # checkout new externals, then should be synced.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_remote_tag_fetch_all(self):
- """Verify that a container with remote tag change works. The new tag
- should not be in the original repo, only the new remote
-        fork. It should also not be on a branch that will be fetched,
-        so it is not retrieved by a plain 'git fetch'; it is only
-        retrieved by 'git fetch --tags'.
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
- # initial checkout
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # update the config file to point to a different remote with
- # the new tag instead of the old branch. Tag MUST NOT be in the original
- # repo! status of simp_branch should then be out of sync.
- self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
- tag='abandoned-feature',
- new_remote_repo_path=SIMPLE_FORK_REPO)
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
-
- # checkout new externals, should be clean again.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_preserve_dot(self):
- """Verify that after inital checkout, modifying an external git repo
- url to '.' and the current branch will leave it unchanged.
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
- # initial checkout
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # update the config file to point to a different remote with
- # the same branch.
- self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2,
- new_remote_repo_path=SIMPLE_FORK_REPO)
- # after checkout, should be clean again.
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # update branch to point to a new branch that only exists in
- # the local fork
- RepoUtils.create_branch(cloned_repo_dir, external_name=BRANCH_SECTION,
- branch='private-feature', with_commit=True)
- self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION,
- branch='private-feature',
- new_remote_repo_path=SIMPLE_LOCAL_ONLY_NAME)
- # after checkout, should be clean again.
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_mixed_subrepo(self):
- """Verify container with mixed subrepo.
-
- The mixed subrepo has a sub-externals file with different
- sub-externals on different branches.
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
- self._generator.create_config()
- self._generator.create_section(MIXED_REPO, 'mixed_req',
- branch='master', sub_externals=CFG_SUB_NAME)
- self._generator.write_config(cloned_repo_dir)
-
- # The subrepo has a repo_url that uses this environment variable.
- # It'll be cleared in tearDown().
- os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root
- debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root
-
-        # initial checkout: all requireds are clean, and optional is empty.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args,
- debug_env=debug_env)
- mixed_req_path = self._external_path('mixed_req')
- self._check_sync_clean(tree[mixed_req_path],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- sub_ext_base_path = "{0}/{1}/{2}".format(EXTERNALS_PATH, 'mixed_req', SUB_EXTERNALS_PATH)
- # The already-checked-in subexternals file has a 'simp_branch' section
- self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # update the mixed-use external to point to different branch
- # status should become out of sync for mixed_req, but sub-externals
- # are still in sync
- self._generator.write_with_git_branch(cloned_repo_dir, name='mixed_req',
- branch='new-feature',
- new_remote_repo_path=MIXED_REPO)
- tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args,
- debug_env=debug_env)
- self._check_sync_clean(tree[mixed_req_path],
- ExternalStatus.MODEL_MODIFIED,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # run the checkout. Now the mixed use external and its sub-externals should be clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args,
- debug_env=debug_env)
- self._check_sync_clean(tree[mixed_req_path],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_component(self):
- """Verify that optional component checkout works
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
- # create the top level externals file
- self._generator.create_config()
- # Optional external, by tag.
- self._generator.create_section(SIMPLE_REPO, 'simp_opt',
- tag='tag1', required=False)
-
- # Required external, by branch.
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
-
- # Required external, by hash.
- self._generator.create_section(SIMPLE_REPO, HASH_SECTION,
- ref_hash='60b1cc1a38d63')
- self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout: first try a nonexistent component argument, 'noref'.
- checkout_args = ['simp_opt', 'noref']
- checkout_args.extend(self.checkout_args)
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, checkout_args)
-
-        # Now explicitly check out one optional component.
- # Explicitly listed component (opt) should be present, the other two not.
- checkout_args = ['simp_opt']
- checkout_args.extend(self.checkout_args)
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- checkout_args)
- self._check_sync_clean(tree[self._external_path('simp_opt')],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
-
- # Check out a second component, this one required.
- # Explicitly listed component (branch) should be present, the still-unlisted one (tag) not.
- checkout_args.append(BRANCH_SECTION)
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- checkout_args)
- self._check_sync_clean(tree[self._external_path('simp_opt')],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
-
-
- def test_container_exclude_component(self):
- """Verify that exclude component checkout works
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag1')
-
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
-
- self._generator.create_section(SIMPLE_REPO, 'simp_hash',
- ref_hash='60b1cc1a38d63')
-
- self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout should result in all externals being clean except excluded TAG_SECTION.
- checkout_args = ['--exclude', TAG_SECTION]
- checkout_args.extend(self.checkout_args)
- tree = self.execute_checkout_with_status(cloned_repo_dir, checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.EMPTY,
- ExternalStatus.DEFAULT)
- self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_subexternal(self):
- """Verify that an externals file can be brought in as a reference.
-
- """
- cloned_repo_dir = self.clone_test_repo(MIXED_REPO)
-
- self._generator.create_config()
- self._generator.create_section_reference_to_subexternal('mixed_base')
- self._generator.write_config(cloned_repo_dir)
-
- # The subrepo has a repo_url that uses this environment variable.
- # It'll be cleared in tearDown().
- os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root
- debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root
-
-        # After checkout, confirm requireds are clean and the referenced
- # subexternal's contents are also clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args,
- debug_env=debug_env)
-
- self._check_sync_clean(
- tree[self._external_path(BRANCH_SECTION, base_path=SUB_EXTERNALS_PATH)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def test_container_sparse(self):
- """Verify that 'full' container with simple subrepo
- can run a sparse checkout and generate the correct initial status.
-
- """
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
-        # Create a file listing the filenames to check out.
- sparse_filename = 'sparse_checkout'
- with open(os.path.join(cloned_repo_dir, sparse_filename), 'w') as sfile:
- sfile.write(README_NAME)
-
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
- tag='tag2')
-
- # Same tag as above, but with a sparse file too.
- sparse_relpath = '../../' + sparse_filename
- self._generator.create_section(SIMPLE_REPO, 'simp_sparse',
- tag='tag2', sparse=sparse_relpath)
-
- self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout, confirm requireds are clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._external_path('simp_sparse')],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- # Check existence of some files - full set in TAG_SECTION, and sparse set
- # in 'simp_sparse'.
- subrepo_path = os.path.join('externals', TAG_SECTION)
- self._check_file_exists(cloned_repo_dir,
- os.path.join(subrepo_path, README_NAME))
- self._check_file_exists(cloned_repo_dir, os.path.join(subrepo_path,
- 'simple_subdir',
- 'subdir_file.txt'))
- subrepo_path = os.path.join('externals', 'simp_sparse')
- self._check_file_exists(cloned_repo_dir,
- os.path.join(subrepo_path, README_NAME))
- self._check_file_absent(cloned_repo_dir, os.path.join(subrepo_path,
- 'simple_subdir',
- 'subdir_file.txt'))
-
-class TestSysCheckoutSVN(BaseTestSysCheckout):
- """Run systems level tests of checkout_externals accessing svn repositories
-
- SVN tests - these tests use the svn repository interface.
- """
-
- @staticmethod
- def _svn_branch_name():
- return './{0}/svn_branch'.format(EXTERNALS_PATH)
-
- @staticmethod
- def _svn_tag_name():
- return './{0}/svn_tag'.format(EXTERNALS_PATH)
-
- def _svn_test_repo_url(self):
- return 'file://' + os.path.join(self._bare_root, SVN_TEST_REPO)
-
- def _check_tag_branch_svn_tag_clean(self, tree):
- self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._svn_branch_name()],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
- self._check_sync_clean(tree[self._svn_tag_name()],
- ExternalStatus.STATUS_OK,
- ExternalStatus.STATUS_OK)
-
- def _have_svn_access(self):
- """Check if we have svn access so we can enable tests that use svn.
-
- """
- have_svn = False
- cmd = ['svn', 'ls', self._svn_test_repo_url(), ]
- try:
- execute_subprocess(cmd)
- have_svn = True
- except BaseException:
- pass
- return have_svn
-
- def _skip_if_no_svn_access(self):
- """Function decorator to disable svn tests when svn isn't available
- """
- have_svn = self._have_svn_access()
- if not have_svn:
- raise unittest.SkipTest("No svn access")
-
- def test_container_simple_svn(self):
- """Verify that a container repo can pull in an svn branch and svn tag.
-
- """
- self._skip_if_no_svn_access()
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
- self._generator.create_config()
- # Git repo.
- self._generator.create_section(SIMPLE_REPO, TAG_SECTION, tag='tag1')
-
- # Svn repos.
- self._generator.create_svn_external('svn_branch', self._svn_test_repo_url(), branch='trunk')
- self._generator.create_svn_external('svn_tag', self._svn_test_repo_url(), tag='tags/cesm2.0.beta07')
-
- self._generator.write_config(cloned_repo_dir)
-
- # checkout, make sure all sections are clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_tag_branch_svn_tag_clean(tree)
-
- # update description file to make the tag into a branch and
- # trigger a switch
- self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag',
- 'trunk')
-
- # checkout, again the results should be clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir,
- self.checkout_args)
- self._check_tag_branch_svn_tag_clean(tree)
-
- # add an untracked file to the repo
- tracked = False
- RepoUtils.add_file_to_repo(cloned_repo_dir,
- 'externals/svn_branch/tmp.txt', tracked)
-
- # run a no-op checkout.
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- # update description file to make the branch into a tag and
- # trigger a modified sync status
- self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag',
- 'tags/cesm2.0.beta07')
-
-        self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-        # verify status is still clean and unmodified; the last
-        # checkout updated the working dir state.
- tree = self.execute_checkout_in_dir(cloned_repo_dir,
- self.verbose_args)
- self._check_tag_branch_svn_tag_clean(tree)
-
-class TestSubrepoCheckout(BaseTestSysCheckout):
- # Need to store information at setUp time for checking
- # pylint: disable=too-many-instance-attributes
- """Run tests to ensure proper handling of repos with submodules.
-
- By default, submodules in git repositories are checked out. A git
-    repository checked out as a submodule is treated as if it were
- listed in an external with the same properties as in the source
- .gitmodules file.
- """
-
- def setUp(self):
- """Setup for all submodule checkout tests
- Create a repo with two submodule repositories.
- """
-
- # Run the basic setup
- super().setUp()
- # create test repo
- # We need to do this here (rather than have a static repo) because
- # git submodules do not allow for variables in .gitmodules files
- self._test_repo_name = 'test_repo_with_submodules'
- self._bare_branch_name = 'subrepo_branch'
- self._config_branch_name = 'subrepo_config_branch'
- self._container_extern_name = 'externals_container.cfg'
- self._my_test_dir = os.path.join(module_tmp_root_dir, self._test_id)
- self._repo_dir = os.path.join(self._my_test_dir, self._test_repo_name)
- self._checkout_dir = 'repo_with_submodules'
- check_dir = self.clone_test_repo(CONTAINER_REPO,
- dest_dir_in=self._repo_dir)
- self.assertTrue(self._repo_dir == check_dir)
- # Add the submodules
- cwd = os.getcwd()
- fork_repo_dir = os.path.join(self._bare_root, SIMPLE_FORK_REPO)
- simple_repo_dir = os.path.join(self._bare_root, SIMPLE_REPO)
- self._simple_ext_fork_name = os.path.splitext(SIMPLE_FORK_REPO)[0]
- self._simple_ext_name = os.path.join('sourc',
- os.path.splitext(SIMPLE_REPO)[0])
- os.chdir(self._repo_dir)
- # Add a branch with a subrepo
- cmd = ['git', 'branch', self._bare_branch_name, 'master']
- execute_subprocess(cmd)
- cmd = ['git', 'checkout', self._bare_branch_name]
- execute_subprocess(cmd)
- cmd = ['git', '-c', 'protocol.file.allow=always','submodule', 'add', fork_repo_dir]
- execute_subprocess(cmd)
- cmd = ['git', 'commit', '-am', "'Added simple-ext-fork as a submodule'"]
- execute_subprocess(cmd)
- # Save the fork repo hash for comparison
- os.chdir(self._simple_ext_fork_name)
- self._fork_hash_check = self.get_git_hash()
- os.chdir(self._repo_dir)
-        # Now, create a branch to test from_submodule
- cmd = ['git', 'branch',
- self._config_branch_name, self._bare_branch_name]
- execute_subprocess(cmd)
- cmd = ['git', 'checkout', self._config_branch_name]
- execute_subprocess(cmd)
- cmd = ['git', '-c', 'protocol.file.allow=always', 'submodule', 'add', '--name', SIMPLE_REPO,
- simple_repo_dir, self._simple_ext_name]
- execute_subprocess(cmd)
- # Checkout feature2
- os.chdir(self._simple_ext_name)
- cmd = ['git', 'branch', 'feature2', 'origin/feature2']
- execute_subprocess(cmd)
- cmd = ['git', 'checkout', 'feature2']
- execute_subprocess(cmd)
- # Save the fork repo hash for comparison
- self._simple_hash_check = self.get_git_hash()
- os.chdir(self._repo_dir)
- self.write_externals_config(filename=self._container_extern_name,
- dest_dir=self._repo_dir, from_submodule=True)
- cmd = ['git', 'add', self._container_extern_name]
- execute_subprocess(cmd)
- cmd = ['git', 'commit', '-am', "'Added simple-ext as a submodule'"]
- execute_subprocess(cmd)
- # Reset to master
- cmd = ['git', 'checkout', 'master']
- execute_subprocess(cmd)
- os.chdir(cwd)
-
- @staticmethod
- def get_git_hash(revision="HEAD"):
- """Return the hash for """
- cmd = ['git', 'rev-parse', revision]
- git_out = execute_subprocess(cmd, output_to_caller=True)
- return git_out.strip()
-
- def write_externals_config(self, name='', dest_dir=None,
- filename=CFG_NAME,
- branch_name=None, sub_externals=None,
- from_submodule=False):
- # pylint: disable=too-many-arguments
- """Create a container externals file with only simple externals.
-
- """
- self._generator.create_config()
-
- if dest_dir is None:
- dest_dir = self._my_test_dir
-
- if from_submodule:
- self._generator.create_section(SIMPLE_FORK_REPO,
- self._simple_ext_fork_name,
- from_submodule=True)
- self._generator.create_section(SIMPLE_REPO,
- self._simple_ext_name,
- branch='feature3', path='',
- from_submodule=False)
- else:
- if branch_name is None:
- branch_name = 'master'
-
- self._generator.create_section(self._test_repo_name,
- self._checkout_dir,
- branch=branch_name,
- path=name, sub_externals=sub_externals,
- repo_path_abs=self._repo_dir)
-
- self._generator.write_config(dest_dir, filename=filename)
-
- def idempotence_check(self, checkout_dir):
- """Verify that calling checkout_externals and
- checkout_externals --status does not cause errors"""
- cwd = os.getcwd()
- os.chdir(checkout_dir)
- self.execute_checkout_in_dir(self._my_test_dir,
- self.checkout_args)
- self.execute_checkout_in_dir(self._my_test_dir,
- self.status_args)
- os.chdir(cwd)
-
- def test_submodule_checkout_bare(self):
- """Verify that a git repo with submodule is properly checked out
-        This test is for when there is no 'externals' keyword in the
- parent repo.
- Correct behavior is that the submodule is checked out using
- normal git submodule behavior.
- """
- simple_ext_fork_tag = "(tag1)"
- simple_ext_fork_status = " "
- self.write_externals_config(branch_name=self._bare_branch_name)
- self.execute_checkout_in_dir(self._my_test_dir,
- self.checkout_args)
- cwd = os.getcwd()
- checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
- fork_file = os.path.join(checkout_dir,
- self._simple_ext_fork_name, "readme.txt")
- self.assertTrue(os.path.exists(fork_file))
-
- submods = git_submodule_status(checkout_dir)
- print('checking status of', checkout_dir, ':', submods)
- self.assertEqual(len(submods.keys()), 1)
- self.assertTrue(self._simple_ext_fork_name in submods)
- submod = submods[self._simple_ext_fork_name]
- self.assertTrue('hash' in submod)
- self.assertEqual(submod['hash'], self._fork_hash_check)
- self.assertTrue('status' in submod)
- self.assertEqual(submod['status'], simple_ext_fork_status)
- self.assertTrue('tag' in submod)
- self.assertEqual(submod['tag'], simple_ext_fork_tag)
- self.idempotence_check(checkout_dir)
-
- def test_submodule_checkout_none(self):
- """Verify that a git repo with submodule is properly checked out
-        This test is for when 'externals=None' is in the parent repo's
- externals cfg file.
-        Correct behavior is that the submodule is not checked out.
- """
- self.write_externals_config(branch_name=self._bare_branch_name,
- sub_externals="none")
- self.execute_checkout_in_dir(self._my_test_dir,
- self.checkout_args)
- cwd = os.getcwd()
- checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
- fork_file = os.path.join(checkout_dir,
- self._simple_ext_fork_name, "readme.txt")
- self.assertFalse(os.path.exists(fork_file))
- os.chdir(cwd)
- self.idempotence_check(checkout_dir)
-
- def test_submodule_checkout_config(self): # pylint: disable=too-many-locals
- """Verify that a git repo with submodule is properly checked out
-        This test is for when the 'from_submodule' keyword is used in the
- parent repo.
- Correct behavior is that the submodule is checked out using
- normal git submodule behavior.
- """
- tag_check = None # Not checked out as submodule
- status_check = "-" # Not checked out as submodule
- self.write_externals_config(branch_name=self._config_branch_name,
- sub_externals=self._container_extern_name)
- self.execute_checkout_in_dir(self._my_test_dir,
- self.checkout_args)
- cwd = os.getcwd()
- checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
- fork_file = os.path.join(checkout_dir,
- self._simple_ext_fork_name, "readme.txt")
- self.assertTrue(os.path.exists(fork_file))
- os.chdir(checkout_dir)
- # Check submodule status
- submods = git_submodule_status(checkout_dir)
- self.assertEqual(len(submods.keys()), 2)
- self.assertTrue(self._simple_ext_fork_name in submods)
- submod = submods[self._simple_ext_fork_name]
- self.assertTrue('hash' in submod)
- self.assertEqual(submod['hash'], self._fork_hash_check)
- self.assertTrue('status' in submod)
- self.assertEqual(submod['status'], status_check)
- self.assertTrue('tag' in submod)
- self.assertEqual(submod['tag'], tag_check)
- self.assertTrue(self._simple_ext_name in submods)
- submod = submods[self._simple_ext_name]
- self.assertTrue('hash' in submod)
- self.assertEqual(submod['hash'], self._simple_hash_check)
- self.assertTrue('status' in submod)
- self.assertEqual(submod['status'], status_check)
- self.assertTrue('tag' in submod)
- self.assertEqual(submod['tag'], tag_check)
- # Check fork repo status
- os.chdir(self._simple_ext_fork_name)
- self.assertEqual(self.get_git_hash(), self._fork_hash_check)
- os.chdir(checkout_dir)
- os.chdir(self._simple_ext_name)
- hash_check = self.get_git_hash('origin/feature3')
- self.assertEqual(self.get_git_hash(), hash_check)
- os.chdir(cwd)
- self.idempotence_check(checkout_dir)
-
-class TestSysCheckoutErrors(BaseTestSysCheckout):
- """Run systems level tests of error conditions in checkout_externals
-
- Error conditions - these tests are designed to trigger specific
- error conditions and ensure that they are being handled as
-    runtime errors (with hopefully useful error messages) instead of
- the default internal message that won't mean anything to the
- user, e.g. key error, called process error, etc.
-
-    These are not 'expected failures'. They pass when a
-    RuntimeError is raised, and fail if any other error is raised (or no
- error is raised).
-
- """
-
- # NOTE(bja, 2017-11) pylint complains about long method names, but
- # it is hard to differentiate tests without making them more
- # cryptic.
- # pylint: disable=invalid-name
-
- def test_error_unknown_protocol(self):
- """Verify that a runtime error is raised when the user specified repo
- protocol is not known.
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to use a protocol that does not exist
-        # for the external.
- self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION,
- 'this-protocol-does-not-exist')
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- def test_error_switch_protocol(self):
- """Verify that a runtime error is raised when the user switches
- protocols, git to svn.
-
- TODO(bja, 2017-11) This correctly results in an error, but it
- isn't a helpful error message.
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to switch the external's protocol from
-        # git to svn.
- self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION, 'svn')
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- def test_error_unknown_tag(self):
- """Verify that a runtime error is raised when the user specified tag
- does not exist.
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to point at a tag that does not exist
-        # in the repo.
- self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
- tag='this-tag-does-not-exist',
- new_remote_repo_path=SIMPLE_REPO)
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- def test_error_overspecify_tag_branch(self):
- """Verify that a runtime error is raised when the user specified both
- tag and a branch
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to specify both a tag and a branch for
-        # the external.
- self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
- tag='this-tag-does-not-exist',
- new_remote_repo_path=SIMPLE_REPO,
- remove_branch=False)
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- def test_error_underspecify_tag_branch(self):
- """Verify that a runtime error is raised when the user specified
- neither a tag or a branch
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file so that neither a tag nor a branch is
-        # specified for the external.
- self._generator.write_without_branch_tag(cloned_repo_dir, BRANCH_SECTION)
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
- def test_error_missing_url(self):
- """Verify that a runtime error is raised when the user specified
- neither a tag or a branch
-
- """
- # create repo
- cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
- self._generator.create_config()
- self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
- branch=REMOTE_BRANCH_FEATURE2)
- self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to remove the repo url from the
-        # external's section.
- self._generator.write_without_repo_url(cloned_repo_dir,
- BRANCH_SECTION)
-
- with self.assertRaises(RuntimeError):
- self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_sys_repository_git.py b/manage_externals/test/test_sys_repository_git.py
deleted file mode 100644
index 7e5fb5020d..0000000000
--- a/manage_externals/test/test_sys_repository_git.py
+++ /dev/null
@@ -1,238 +0,0 @@
-#!/usr/bin/env python3
-
-"""Tests of some of the functionality in repository_git.py that actually
-interacts with git repositories.
-
-We're calling these "system" tests because we expect them to be a lot
-slower than most of the unit tests.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import shutil
-import tempfile
-import unittest
-
-from manic.repository_git import GitRepository
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import ExternalsDescriptionDict
-from manic.utils import execute_subprocess
-
-# NOTE(wjs, 2018-04-09) I find a mix of camel case and underscores to be
-# more readable for unit test names, so I'm disabling pylint's naming
-# convention check
-# pylint: disable=C0103
-
-# Allow access to protected members
-# pylint: disable=W0212
-
-
-class GitTestCase(unittest.TestCase):
- """Adds some git-specific unit test functionality on top of TestCase"""
-
- def assertIsHash(self, maybe_hash):
- """Assert that the string given by maybe_hash really does look
- like a git hash.
- """
-
- # Ensure it is non-empty
- self.assertTrue(maybe_hash, msg="maybe_hash is empty")
-
-        # Ensure it consists of a single whitespace-free token
- self.assertEqual(1, len(maybe_hash.split()),
- msg="maybe_hash has multiple strings: {}".format(maybe_hash))
-
- # Ensure that the only characters in the string are ones allowed
- # in hashes
- allowed_chars_set = set('0123456789abcdef')
- self.assertTrue(set(maybe_hash) <= allowed_chars_set,
- msg="maybe_hash has non-hash characters: {}".format(maybe_hash))
-
-
-class TestGitTestCase(GitTestCase):
- """Tests GitTestCase"""
-
- def test_assertIsHash_true(self):
- """Ensure that assertIsHash passes for something that looks
- like a hash"""
- self.assertIsHash('abc123')
-
- def test_assertIsHash_empty(self):
- """Ensure that assertIsHash raises an AssertionError for an
- empty string"""
- with self.assertRaises(AssertionError):
- self.assertIsHash('')
-
- def test_assertIsHash_multipleStrings(self):
- """Ensure that assertIsHash raises an AssertionError when
- given multiple strings"""
- with self.assertRaises(AssertionError):
- self.assertIsHash('abc123 def456')
-
- def test_assertIsHash_badChar(self):
- """Ensure that assertIsHash raises an AssertionError when given a
- string that has a character that doesn't belong in a hash
- """
- with self.assertRaises(AssertionError):
- self.assertIsHash('abc123g')
-
-
-class TestGitRepositoryGitCommands(GitTestCase):
- """Test some git commands in RepositoryGit
-
- It's silly that we need to create a repository in order to test
- these git commands. Much or all of the git functionality that is
- currently in repository_git.py should eventually be moved to a
- separate module that is solely responsible for wrapping git
- commands; that would allow us to test it independently of this
- repository class.
- """
-
- # ========================================================================
- # Test helper functions
- # ========================================================================
-
- def setUp(self):
- # directory we want to return to after the test system and
- # checkout_externals are done cd'ing all over the place.
- self._return_dir = os.getcwd()
-
- self._tmpdir = tempfile.mkdtemp()
- os.chdir(self._tmpdir)
-
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- '/path/to/local/repo',
- ExternalsDescription.TAG:
- 'tag1',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'junk',
- ExternalsDescription.EXTERNALS: '',
- ExternalsDescription.REPO: rdata,
- },
- }
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = GitRepository('test', repo)
-
- def tearDown(self):
- # return to our common starting point
- os.chdir(self._return_dir)
-
- shutil.rmtree(self._tmpdir, ignore_errors=True)
-
- @staticmethod
- def make_cwd_git_repo():
- """Turn the current directory into an empty git repository"""
- execute_subprocess(['git', 'init'])
-
- @staticmethod
- def add_cwd_git_commit():
- """Add a git commit in the current directory"""
- with open('README', 'a') as myfile:
- myfile.write('more info')
- execute_subprocess(['git', 'add', 'README'])
- execute_subprocess(['git', 'commit', '-m', 'my commit message'])
-
- @staticmethod
- def checkout_cwd_git_branch(branchname):
- """Checkout a new branch in the current directory"""
- execute_subprocess(['git', 'checkout', '-b', branchname])
-
- @staticmethod
- def make_cwd_git_tag(tagname):
- """Make a lightweight tag at the current commit"""
- execute_subprocess(['git', 'tag', '-m', 'making a tag', tagname])
-
- @staticmethod
- def checkout_cwd_ref(refname):
- """Checkout the given refname in the current directory"""
- execute_subprocess(['git', 'checkout', refname])
-
- # ========================================================================
- # Begin actual tests
- # ========================================================================
-
- def test_currentHash_returnsHash(self):
- """Ensure that the _git_current_hash function returns a hash"""
- self.make_cwd_git_repo()
- self.add_cwd_git_commit()
- hash_found, myhash = self._repo._git_current_hash(os.getcwd())
- self.assertTrue(hash_found)
- self.assertIsHash(myhash)
-
- def test_currentHash_outsideGitRepo(self):
- """Ensure that the _git_current_hash function returns False when
- outside a git repository"""
- hash_found, myhash = self._repo._git_current_hash(os.getcwd())
- self.assertFalse(hash_found)
- self.assertEqual('', myhash)
-
- def test_currentBranch_onBranch(self):
- """Ensure that the _git_current_branch function returns the name
- of the branch"""
- self.make_cwd_git_repo()
- self.add_cwd_git_commit()
- self.checkout_cwd_git_branch('foo')
- branch_found, mybranch = self._repo._git_current_branch(os.getcwd())
- self.assertTrue(branch_found)
- self.assertEqual('foo', mybranch)
-
- def test_currentBranch_notOnBranch(self):
- """Ensure that the _git_current_branch function returns False
- when not on a branch"""
- self.make_cwd_git_repo()
- self.add_cwd_git_commit()
- self.make_cwd_git_tag('mytag')
- self.checkout_cwd_ref('mytag')
- branch_found, mybranch = self._repo._git_current_branch(os.getcwd())
- self.assertFalse(branch_found)
- self.assertEqual('', mybranch)
-
- def test_currentBranch_outsideGitRepo(self):
- """Ensure that the _git_current_branch function returns False
- when outside a git repository"""
- branch_found, mybranch = self._repo._git_current_branch(os.getcwd())
- self.assertFalse(branch_found)
- self.assertEqual('', mybranch)
-
- def test_currentTag_onTag(self):
- """Ensure that the _git_current_tag function returns the name of
- the tag"""
- self.make_cwd_git_repo()
- self.add_cwd_git_commit()
- self.make_cwd_git_tag('some_tag')
- tag_found, mytag = self._repo._git_current_tag(os.getcwd())
- self.assertTrue(tag_found)
- self.assertEqual('some_tag', mytag)
-
- def test_currentTag_notOnTag(self):
- """Ensure tha the _git_current_tag function returns False when
- not on a tag"""
- self.make_cwd_git_repo()
- self.add_cwd_git_commit()
- self.make_cwd_git_tag('some_tag')
- self.add_cwd_git_commit()
- tag_found, mytag = self._repo._git_current_tag(os.getcwd())
- self.assertFalse(tag_found)
- self.assertEqual('', mytag)
-
- def test_currentTag_outsideGitRepo(self):
- """Ensure that the _git_current_tag function returns False when
- outside a git repository"""
- tag_found, mytag = self._repo._git_current_tag(os.getcwd())
- self.assertFalse(tag_found)
- self.assertEqual('', mytag)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_externals_description.py b/manage_externals/test/test_unit_externals_description.py
deleted file mode 100644
index 30e5288499..0000000000
--- a/manage_externals/test/test_unit_externals_description.py
+++ /dev/null
@@ -1,478 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import os.path
-import shutil
-import unittest
-
-try:
- # python2
- from ConfigParser import SafeConfigParser as config_parser
-
- def config_string_cleaner(text):
- """convert strings into unicode
- """
- return text.decode('utf-8')
-except ImportError:
- # python3
- from configparser import ConfigParser as config_parser
-
- def config_string_cleaner(text):
- """Python3 already uses unicode strings, so just return the string
- without modification.
-
- """
- return text
-
-from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import ExternalsDescriptionDict
-from manic.externals_description import ExternalsDescriptionConfigV1
-from manic.externals_description import get_cfg_schema_version
-from manic.externals_description import read_externals_description_file
-from manic.externals_description import create_externals_description
-
-from manic.global_constants import EMPTY_STR
-
-
-class TestCfgSchemaVersion(unittest.TestCase):
- """Test that schema identification for the externals description
- returns the correct results.
-
- """
-
- def setUp(self):
- """Reusable config object
- """
- self._config = config_parser()
- self._config.add_section('section1')
-        self._config.set('section1', 'keyword', 'value')
-
- self._config.add_section(DESCRIPTION_SECTION)
-
- def test_schema_version_valid(self):
- """Test that schema identification returns the correct version for a
-        valid version string.
-
- """
- version_str = '2.1.3'
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, version_str)
- major, minor, patch = get_cfg_schema_version(self._config)
- expected_major = 2
- expected_minor = 1
- expected_patch = 3
- self.assertEqual(expected_major, major)
- self.assertEqual(expected_minor, minor)
- self.assertEqual(expected_patch, patch)
-
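-    # The assertions above imply that the version string decomposes along
-    # the dots, presumably something like (a sketch, not the actual
-    # implementation):
-    #
-    #   major, minor, patch = (int(part) for part in version_str.split('.'))
-    #
-    # which is why a non-integer component (tested below) must raise.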
- def test_schema_section_missing(self):
- """Test that an error is returned if the schema section is missing
- from the input file.
-
- """
- self._config.remove_section(DESCRIPTION_SECTION)
- with self.assertRaises(RuntimeError):
- get_cfg_schema_version(self._config)
-
- def test_schema_version_missing(self):
- """Test that a externals description file without a version raises a
- runtime error.
-
- """
- # Note: the default setup method shouldn't include a version
- # keyword, but remove it just to be future proof....
- self._config.remove_option(DESCRIPTION_SECTION, VERSION_ITEM)
- with self.assertRaises(RuntimeError):
- get_cfg_schema_version(self._config)
-
- def test_schema_version_not_int(self):
- """Test that a externals description file a version that doesn't
- decompose to integer major, minor and patch versions raises
- runtime error.
-
- """
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, 'unknown')
- with self.assertRaises(RuntimeError):
- get_cfg_schema_version(self._config)
-
-
-class TestModelDescriptionConfigV1(unittest.TestCase):
-    """Test that parsing a config/ini file produces a correct dictionary
- for the externals description.
-
- """
- # pylint: disable=R0902
-
- def setUp(self):
- """Boiler plate construction of string containing xml for multiple components.
- """
- self._comp1_name = 'comp1'
- self._comp1_path = 'path/to/comp1'
- self._comp1_protocol = 'svn'
- self._comp1_url = 'https://svn.somewhere.com/path/of/comp1'
- self._comp1_tag = 'a_nice_tag_v1'
- self._comp1_is_required = 'True'
- self._comp1_externals = ''
-
- self._comp2_name = 'comp2'
- self._comp2_path = 'path/to/comp2'
- self._comp2_protocol = 'git'
- self._comp2_url = '/local/clone/of/comp2'
- self._comp2_branch = 'a_very_nice_branch'
- self._comp2_is_required = 'False'
- self._comp2_externals = 'path/to/comp2.cfg'
-
- def _setup_comp1(self, config):
- """Boiler plate construction of xml string for componet 1
- """
- config.add_section(self._comp1_name)
- config.set(self._comp1_name, 'local_path', self._comp1_path)
- config.set(self._comp1_name, 'protocol', self._comp1_protocol)
- config.set(self._comp1_name, 'repo_url', self._comp1_url)
- config.set(self._comp1_name, 'tag', self._comp1_tag)
- config.set(self._comp1_name, 'required', self._comp1_is_required)
-
- def _setup_comp2(self, config):
- """Boiler plate construction of xml string for componet 2
- """
- config.add_section(self._comp2_name)
- config.set(self._comp2_name, 'local_path', self._comp2_path)
- config.set(self._comp2_name, 'protocol', self._comp2_protocol)
- config.set(self._comp2_name, 'repo_url', self._comp2_url)
- config.set(self._comp2_name, 'branch', self._comp2_branch)
- config.set(self._comp2_name, 'required', self._comp2_is_required)
- config.set(self._comp2_name, 'externals', self._comp2_externals)
-
- @staticmethod
- def _setup_externals_description(config):
- """Add the required exernals description section
- """
-
- config.add_section(DESCRIPTION_SECTION)
- config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.1')
-
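-    # For orientation, the config assembled by the helpers above corresponds
-    # to ini text roughly like the following (the description section and
-    # version key names come from DESCRIPTION_SECTION and VERSION_ITEM):
-    #
-    #   [comp1]
-    #   local_path = path/to/comp1
-    #   protocol = svn
-    #   repo_url = https://svn.somewhere.com/path/of/comp1
-    #   tag = a_nice_tag_v1
-    #   required = True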
- def _check_comp1(self, model):
- """Test that component one was constructed correctly.
- """
- self.assertTrue(self._comp1_name in model)
- comp1 = model[self._comp1_name]
- self.assertEqual(comp1[ExternalsDescription.PATH], self._comp1_path)
- self.assertTrue(comp1[ExternalsDescription.REQUIRED])
- repo = comp1[ExternalsDescription.REPO]
- self.assertEqual(repo[ExternalsDescription.PROTOCOL],
- self._comp1_protocol)
- self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp1_url)
- self.assertEqual(repo[ExternalsDescription.TAG], self._comp1_tag)
- self.assertEqual(EMPTY_STR, comp1[ExternalsDescription.EXTERNALS])
-
- def _check_comp2(self, model):
- """Test that component two was constucted correctly.
- """
- self.assertTrue(self._comp2_name in model)
- comp2 = model[self._comp2_name]
- self.assertEqual(comp2[ExternalsDescription.PATH], self._comp2_path)
- self.assertFalse(comp2[ExternalsDescription.REQUIRED])
- repo = comp2[ExternalsDescription.REPO]
- self.assertEqual(repo[ExternalsDescription.PROTOCOL],
- self._comp2_protocol)
- self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp2_url)
- self.assertEqual(repo[ExternalsDescription.BRANCH], self._comp2_branch)
- self.assertEqual(self._comp2_externals,
- comp2[ExternalsDescription.EXTERNALS])
-
- def test_one_tag_required(self):
- """Test that a component source with a tag is correctly parsed.
- """
- config = config_parser()
- self._setup_comp1(config)
- self._setup_externals_description(config)
- model = ExternalsDescriptionConfigV1(config)
- print(model)
- self._check_comp1(model)
-
- def test_one_branch_externals(self):
- """Test that a component source with a branch is correctly parsed.
- """
- config = config_parser()
- self._setup_comp2(config)
- self._setup_externals_description(config)
- model = ExternalsDescriptionConfigV1(config)
- print(model)
- self._check_comp2(model)
-
- def test_two_sources(self):
- """Test that multiple component sources are correctly parsed.
- """
- config = config_parser()
- self._setup_comp1(config)
- self._setup_comp2(config)
- self._setup_externals_description(config)
- model = ExternalsDescriptionConfigV1(config)
- print(model)
- self._check_comp1(model)
- self._check_comp2(model)
-
- def test_cfg_v1_reject_unknown_item(self):
- """Test that a v1 description object will reject unknown items
- """
- config = config_parser()
- self._setup_comp1(config)
- self._setup_externals_description(config)
- config.set(self._comp1_name, 'junk', 'foobar')
- with self.assertRaises(RuntimeError):
- ExternalsDescriptionConfigV1(config)
-
- def test_cfg_v1_reject_v2(self):
- """Test that a v1 description object won't try to parse a v2 file.
- """
- config = config_parser()
- self._setup_comp1(config)
- self._setup_externals_description(config)
- config.set(DESCRIPTION_SECTION, VERSION_ITEM, '2.0.1')
- with self.assertRaises(RuntimeError):
- ExternalsDescriptionConfigV1(config)
-
- def test_cfg_v1_reject_v1_too_new(self):
- """Test that a v1 description object won't try to parse a v2 file.
- """
- config = config_parser()
- self._setup_comp1(config)
- self._setup_externals_description(config)
- config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.100.0')
- with self.assertRaises(RuntimeError):
- ExternalsDescriptionConfigV1(config)
-
-
-class TestReadExternalsDescription(unittest.TestCase):
- """Test the application logic of read_externals_description_file
- """
- TMP_FAKE_DIR = 'fake'
-
- def setUp(self):
- """Setup directory for tests
- """
- if not os.path.exists(self.TMP_FAKE_DIR):
- os.makedirs(self.TMP_FAKE_DIR)
-
- def tearDown(self):
- """Cleanup tmp stuff on the file system
- """
- if os.path.exists(self.TMP_FAKE_DIR):
- shutil.rmtree(self.TMP_FAKE_DIR)
-
- def test_no_file_error(self):
- """Test that a runtime error is raised when the file does not exist
-
- """
- root_dir = os.getcwd()
- filename = 'this-file-should-not-exist'
- with self.assertRaises(RuntimeError):
- read_externals_description_file(root_dir, filename)
-
- def test_no_dir_error(self):
- """Test that a runtime error is raised when the file does not exist
-
- """
- root_dir = '/path/to/some/repo'
- filename = 'externals.cfg'
- with self.assertRaises(RuntimeError):
- read_externals_description_file(root_dir, filename)
-
- def test_no_invalid_error(self):
- """Test that a runtime error is raised when the file format is invalid
-
- """
- root_dir = os.getcwd()
- filename = 'externals.cfg'
- file_path = os.path.join(root_dir, filename)
- file_path = os.path.abspath(file_path)
- contents = """
-
-invalid file format
-"""
- with open(file_path, 'w') as fhandle:
- fhandle.write(contents)
- with self.assertRaises(RuntimeError):
- read_externals_description_file(root_dir, filename)
- os.remove(file_path)
-
-
-class TestCreateExternalsDescription(unittest.TestCase):
- """Test the application logic of creat_externals_description
- """
-
- def setUp(self):
- """Create config object used as basis for all tests
- """
- self._config = config_parser()
- self._gmconfig = config_parser()
- self.setup_config()
-
- def setup_config(self):
- """Boiler plate construction of xml string for componet 1
- """
- # Create a standard externals config with a single external
- name = 'test'
- self._config.add_section(name)
- self._config.set(name, ExternalsDescription.PATH, 'externals')
- self._config.set(name, ExternalsDescription.PROTOCOL, 'git')
- self._config.set(name, ExternalsDescription.REPO_URL, '/path/to/repo')
- self._config.set(name, ExternalsDescription.TAG, 'test_tag')
- self._config.set(name, ExternalsDescription.REQUIRED, 'True')
-
- self._config.add_section(DESCRIPTION_SECTION)
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0')
-
- # Create a .gitmodules test
- name = 'submodule "gitmodules_test"'
- self._gmconfig.add_section(name)
- self._gmconfig.set(name, "path", 'externals/test')
- self._gmconfig.set(name, "url", '/path/to/repo')
- # NOTE(goldy, 2019-03) Should test other possible keywords such as
- # fetchRecurseSubmodules, ignore, and shallow
-
- @staticmethod
- def setup_dict_config():
- """Create the full container dictionary with simple and mixed use
- externals
-
- """
- rdatat = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL: 'simple-ext.git',
- ExternalsDescription.TAG: 'tag1'}
- rdatab = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL: 'simple-ext.git',
- ExternalsDescription.BRANCH: 'feature2'}
- rdatam = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL: 'mixed-cont-ext.git',
- ExternalsDescription.BRANCH: 'master'}
- desc = {'simp_tag': {ExternalsDescription.REQUIRED: True,
- ExternalsDescription.PATH: 'simp_tag',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdatat},
- 'simp_branch' : {ExternalsDescription.REQUIRED: True,
- ExternalsDescription.PATH: 'simp_branch',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdatab},
- 'simp_opt': {ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'simp_opt',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdatat},
- 'mixed_req': {ExternalsDescription.REQUIRED: True,
- ExternalsDescription.PATH: 'mixed_req',
- ExternalsDescription.EXTERNALS: 'sub-ext.cfg',
- ExternalsDescription.REPO: rdatam}}
-
- return desc
-
- def test_cfg_v1_ok(self):
- """Test that a correct cfg v1 object is created by create_externals_description
-
- """
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.3')
- ext = create_externals_description(self._config, model_format='cfg')
- self.assertIsInstance(ext, ExternalsDescriptionConfigV1)
-
- def test_cfg_v1_unknown_version(self):
- """Test that a config file with unknown schema version is rejected by
- create_externals_description.
-
- """
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '100.0.3')
- with self.assertRaises(RuntimeError):
- create_externals_description(self._config, model_format='cfg')
-
- def test_dict(self):
- """Test that a correct cfg v1 object is created by create_externals_description
-
- """
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL: '/path/to/repo',
- ExternalsDescription.TAG: 'tagv1',
- }
-
- desc = {
- 'test': {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: '../fake',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata, },
- }
-
- ext = create_externals_description(desc, model_format='dict')
- self.assertIsInstance(ext, ExternalsDescriptionDict)
-
- def test_cfg_component_dict(self):
- """Verify that create_externals_description works with a dictionary
- """
- # create the top level externals file
- desc = self.setup_dict_config()
- # Check external with all repos
- external = create_externals_description(desc, model_format='dict')
- self.assertIsInstance(external, ExternalsDescriptionDict)
- self.assertTrue('simp_tag' in external)
- self.assertTrue('simp_branch' in external)
- self.assertTrue('simp_opt' in external)
- self.assertTrue('mixed_req' in external)
-
- def test_cfg_exclude_component_dict(self):
- """Verify that exclude component checkout works with a dictionary
- """
- # create the top level externals file
- desc = self.setup_dict_config()
- # Test an excluded repo
- external = create_externals_description(desc, model_format='dict',
- exclude=['simp_tag',
- 'simp_opt'])
- self.assertIsInstance(external, ExternalsDescriptionDict)
- self.assertFalse('simp_tag' in external)
- self.assertTrue('simp_branch' in external)
- self.assertFalse('simp_opt' in external)
- self.assertTrue('mixed_req' in external)
-
- def test_cfg_opt_component_dict(self):
- """Verify that exclude component checkout works with a dictionary
- """
- # create the top level externals file
- desc = self.setup_dict_config()
-        # Test explicitly included components
- external = create_externals_description(desc, model_format='dict',
- components=['simp_tag',
- 'simp_opt'])
- self.assertIsInstance(external, ExternalsDescriptionDict)
- self.assertTrue('simp_tag' in external)
- self.assertFalse('simp_branch' in external)
- self.assertTrue('simp_opt' in external)
- self.assertFalse('mixed_req' in external)
-
- def test_cfg_unknown_version(self):
- """Test that a runtime error is raised when an unknown file version is
- received
-
- """
- self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '123.456.789')
- with self.assertRaises(RuntimeError):
- create_externals_description(self._config, model_format='cfg')
-
- def test_cfg_unknown_format(self):
- """Test that a runtime error is raised when an unknown format string is
- received
-
- """
- with self.assertRaises(RuntimeError):
- create_externals_description(self._config, model_format='unknown')
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_externals_status.py b/manage_externals/test/test_unit_externals_status.py
deleted file mode 100644
index f019514e9e..0000000000
--- a/manage_externals/test/test_unit_externals_status.py
+++ /dev/null
@@ -1,299 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for the manic external status reporting module.
-
-Note: this script assumes the path to the manic package is already in
-the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.externals_status import ExternalStatus
-
-
-class TestStatusObject(unittest.TestCase):
- """Verify that the Status object behaives as expected.
- """
-
- def test_exists_empty_all(self):
- """If the repository sync-state is empty (doesn't exist), and there is no
- clean state, then it is considered not to exist.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.EMPTY
- stat.clean_state = ExternalStatus.DEFAULT
- exists = stat.exists()
- self.assertFalse(exists)
-
- stat.clean_state = ExternalStatus.EMPTY
- exists = stat.exists()
- self.assertFalse(exists)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- exists = stat.exists()
- self.assertFalse(exists)
-
- # this state represents an internal logic error in how the
- # repo status was determined.
- stat.clean_state = ExternalStatus.STATUS_OK
- exists = stat.exists()
- self.assertTrue(exists)
-
- # this state represents an internal logic error in how the
- # repo status was determined.
- stat.clean_state = ExternalStatus.DIRTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- def test_exists_default_all(self):
- """If the repository sync-state is default, then it is considered to exist
- regardless of clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.DEFAULT
- stat.clean_state = ExternalStatus.DEFAULT
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.EMPTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.DIRTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- def test_exists_unknown_all(self):
- """If the repository sync-state is unknown, then it is considered to exist
- regardless of clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.UNKNOWN
- stat.clean_state = ExternalStatus.DEFAULT
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.EMPTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.DIRTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- def test_exists_modified_all(self):
- """If the repository sync-state is modified, then it is considered to exist
- regardless of clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.MODEL_MODIFIED
- stat.clean_state = ExternalStatus.DEFAULT
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.EMPTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.DIRTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- def test_exists_ok_all(self):
- """If the repository sync-state is ok, then it is considered to exist
- regardless of clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.STATUS_OK
- stat.clean_state = ExternalStatus.DEFAULT
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.EMPTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- exists = stat.exists()
- self.assertTrue(exists)
-
- stat.clean_state = ExternalStatus.DIRTY
- exists = stat.exists()
- self.assertTrue(exists)
-
- def test_update_ok_all(self):
- """If the repository in-sync is ok, then it is safe to
- update only if clean state is ok
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.STATUS_OK
- stat.clean_state = ExternalStatus.DEFAULT
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.EMPTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- safe_to_update = stat.safe_to_update()
- self.assertTrue(safe_to_update)
-
- stat.clean_state = ExternalStatus.DIRTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- def test_update_modified_all(self):
- """If the repository in-sync is modified, then it is safe to
- update only if clean state is ok
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.MODEL_MODIFIED
- stat.clean_state = ExternalStatus.DEFAULT
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.EMPTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- safe_to_update = stat.safe_to_update()
- self.assertTrue(safe_to_update)
-
- stat.clean_state = ExternalStatus.DIRTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- def test_update_unknown_all(self):
- """If the repository in-sync is unknown, then it is not safe to
- update, regardless of the clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.UNKNOWN
- stat.clean_state = ExternalStatus.DEFAULT
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.EMPTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.DIRTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- def test_update_default_all(self):
- """If the repository in-sync is default, then it is not safe to
- update, regardless of the clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.DEFAULT
- stat.clean_state = ExternalStatus.DEFAULT
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.EMPTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.DIRTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- def test_update_empty_all(self):
- """If the repository in-sync is empty, then it is not safe to
- update, regardless of the clean state.
-
- """
- stat = ExternalStatus()
- stat.sync_state = ExternalStatus.EMPTY
- stat.clean_state = ExternalStatus.DEFAULT
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.EMPTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.UNKNOWN
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.STATUS_OK
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
- stat.clean_state = ExternalStatus.DIRTY
- safe_to_update = stat.safe_to_update()
- self.assertFalse(safe_to_update)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_repository.py b/manage_externals/test/test_unit_repository.py
deleted file mode 100644
index 1b93861834..0000000000
--- a/manage_externals/test/test_unit_repository.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.repository_factory import create_repository
-from manic.repository_git import GitRepository
-from manic.repository_svn import SvnRepository
-from manic.repository import Repository
-from manic.externals_description import ExternalsDescription
-from manic.global_constants import EMPTY_STR
-
-
-class TestCreateRepositoryDict(unittest.TestCase):
- """Test the create_repository functionality to ensure it returns the
- proper type of repository and errors for unknown repository
- types.
-
- """
-
- def setUp(self):
- """Common data needed for all tests in this class
- """
- self._name = 'test_name'
- self._repo = {ExternalsDescription.PROTOCOL: None,
- ExternalsDescription.REPO_URL: 'junk_root',
- ExternalsDescription.TAG: 'junk_tag',
- ExternalsDescription.BRANCH: EMPTY_STR,
- ExternalsDescription.HASH: EMPTY_STR,
- ExternalsDescription.SPARSE: EMPTY_STR, }
-
- def test_create_repo_git(self):
- """Verify that several possible names for the 'git' protocol
- create git repository objects.
-
- """
- protocols = ['git', 'GIT', 'Git', ]
- for protocol in protocols:
- self._repo[ExternalsDescription.PROTOCOL] = protocol
- repo = create_repository(self._name, self._repo)
- self.assertIsInstance(repo, GitRepository)
-
- def test_create_repo_svn(self):
- """Verify that several possible names for the 'svn' protocol
- create svn repository objects.
- """
- protocols = ['svn', 'SVN', 'Svn', ]
- for protocol in protocols:
- self._repo[ExternalsDescription.PROTOCOL] = protocol
- repo = create_repository(self._name, self._repo)
- self.assertIsInstance(repo, SvnRepository)
-
- def test_create_repo_externals_only(self):
- """Verify that an externals only repo returns None.
- """
- protocols = ['externals_only', ]
- for protocol in protocols:
- self._repo[ExternalsDescription.PROTOCOL] = protocol
- repo = create_repository(self._name, self._repo)
- self.assertEqual(None, repo)
-
- def test_create_repo_unsupported(self):
- """Verify that an unsupported protocol generates a runtime error.
- """
- protocols = ['not_a_supported_protocol', ]
- for protocol in protocols:
- self._repo[ExternalsDescription.PROTOCOL] = protocol
- with self.assertRaises(RuntimeError):
- create_repository(self._name, self._repo)
-
-
-class TestRepository(unittest.TestCase):
- """Test the externals description processing used to create the Repository
- base class shared by protocol specific repository classes.
-
- """
-
- def test_tag(self):
- """Test creation of a repository object with a tag
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- tag = 'test_tag'
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.TAG: tag,
- ExternalsDescription.BRANCH: EMPTY_STR,
- ExternalsDescription.HASH: EMPTY_STR,
- ExternalsDescription.SPARSE: EMPTY_STR, }
- repo = Repository(name, repo_info)
- print(repo.__dict__)
- self.assertEqual(repo.tag(), tag)
- self.assertEqual(repo.url(), url)
-
- def test_branch(self):
- """Test creation of a repository object with a branch
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- branch = 'test_branch'
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.BRANCH: branch,
- ExternalsDescription.TAG: EMPTY_STR,
- ExternalsDescription.HASH: EMPTY_STR,
- ExternalsDescription.SPARSE: EMPTY_STR, }
- repo = Repository(name, repo_info)
- print(repo.__dict__)
- self.assertEqual(repo.branch(), branch)
- self.assertEqual(repo.url(), url)
-
- def test_hash(self):
- """Test creation of a repository object with a hash
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- ref = 'deadc0de'
- sparse = EMPTY_STR
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.BRANCH: EMPTY_STR,
- ExternalsDescription.TAG: EMPTY_STR,
- ExternalsDescription.HASH: ref,
- ExternalsDescription.SPARSE: sparse, }
- repo = Repository(name, repo_info)
- print(repo.__dict__)
- self.assertEqual(repo.hash(), ref)
- self.assertEqual(repo.url(), url)
-
- def test_tag_branch(self):
- """Test creation of a repository object with a tag and branch raises a
- runtime error.
-
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- branch = 'test_branch'
- tag = 'test_tag'
- ref = EMPTY_STR
- sparse = EMPTY_STR
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.BRANCH: branch,
- ExternalsDescription.TAG: tag,
- ExternalsDescription.HASH: ref,
- ExternalsDescription.SPARSE: sparse, }
- with self.assertRaises(RuntimeError):
- Repository(name, repo_info)
-
- def test_tag_branch_hash(self):
- """Test creation of a repository object with a tag, branch and hash raises a
- runtime error.
-
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- branch = 'test_branch'
- tag = 'test_tag'
- ref = 'deadc0de'
- sparse = EMPTY_STR
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.BRANCH: branch,
- ExternalsDescription.TAG: tag,
- ExternalsDescription.HASH: ref,
- ExternalsDescription.SPARSE: sparse, }
- with self.assertRaises(RuntimeError):
- Repository(name, repo_info)
-
- def test_no_tag_no_branch(self):
- """Test creation of a repository object without a tag or branch raises a
- runtime error.
-
- """
- name = 'test_repo'
- protocol = 'test_protocol'
- url = 'test_url'
- branch = EMPTY_STR
- tag = EMPTY_STR
- ref = EMPTY_STR
- sparse = EMPTY_STR
- repo_info = {ExternalsDescription.PROTOCOL: protocol,
- ExternalsDescription.REPO_URL: url,
- ExternalsDescription.BRANCH: branch,
- ExternalsDescription.TAG: tag,
- ExternalsDescription.HASH: ref,
- ExternalsDescription.SPARSE: sparse, }
- with self.assertRaises(RuntimeError):
- Repository(name, repo_info)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_repository_git.py b/manage_externals/test/test_unit_repository_git.py
deleted file mode 100644
index 1c01098acf..0000000000
--- a/manage_externals/test/test_unit_repository_git.py
+++ /dev/null
@@ -1,811 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-# pylint: disable=too-many-lines,protected-access
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import shutil
-import unittest
-
-from manic.repository_git import GitRepository
-from manic.externals_status import ExternalStatus
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import ExternalsDescriptionDict
-from manic.global_constants import EMPTY_STR
-
-# NOTE(bja, 2017-11) order is important here. origin should be a
-# subset of other to trap errors on processing remotes!
-GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM = '''
-upstream /path/to/other/repo (fetch)
-upstream /path/to/other/repo (push)
-other /path/to/local/repo2 (fetch)
-other /path/to/local/repo2 (push)
-origin /path/to/local/repo (fetch)
-origin /path/to/local/repo (push)
-'''
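-
- # The fixture above implies the reverse mapping exercised by the sync
- # tests: '/path/to/local/repo' -> 'origin', '/path/to/other/repo' ->
- # 'upstream' and '/path/to/local/repo2' -> 'other'.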
-
-
-class TestGitRepositoryCurrentRef(unittest.TestCase):
- """test the current_ref command on a git repository
- """
-
- def setUp(self):
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- '/path/to/local/repo',
- ExternalsDescription.TAG:
- 'tag1',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'junk',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = GitRepository('test', repo)
-
- #
- # mock methods replacing git system calls
- #
- @staticmethod
- def _git_current_branch(branch_found, branch_name):
- """Return a function that takes the place of
- repo._git_current_branch, which returns the given output."""
- def my_git_current_branch(dirname):
- """mock function that can take the place of repo._git_current_branch"""
- return branch_found, branch_name
- return my_git_current_branch
-
- @staticmethod
- def _git_current_tag(tag_found, tag_name):
- """Return a function that takes the place of
- repo._git_current_tag, which returns the given output."""
- def my_git_current_tag(dirname):
- """mock function that can take the place of repo._git_current_tag"""
- return tag_found, tag_name
- return my_git_current_tag
-
- @staticmethod
- def _git_current_hash(hash_found, hash_name):
- """Return a function that takes the place of
- repo._git_current_hash, which returns the given output."""
- def my_git_current_hash(dirname):
- """mock function that can take the place of repo._git_current_hash"""
- return hash_found, hash_name
- return my_git_current_hash
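-
- # Illustrative use of these factories (mirroring test_ref_detached_tag
- # below): stub the queries with canned answers, then call _current_ref:
- # self._repo._git_current_branch = self._git_current_branch(False, '')
- # self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag')
- # self._repo._current_ref(os.getcwd()) # -> 'foo_tag'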
-
- # ------------------------------------------------------------------------
- # Begin tests
- # ------------------------------------------------------------------------
-
- def test_ref_branch(self):
- """Test that we correctly identify we are on a branch
- """
- self._repo._git_current_branch = self._git_current_branch(
- True, 'feature3')
- self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag')
- self._repo._git_current_hash = self._git_current_hash(True, 'abc123')
- expected = 'foo_tag (branch feature3)'
- result = self._repo._current_ref(os.getcwd())
- self.assertEqual(result, expected)
-
- def test_ref_detached_tag(self):
- """Test that we correctly identify that the ref is detached at a tag
- """
- self._repo._git_current_branch = self._git_current_branch(False, '')
- self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag')
- self._repo._git_current_hash = self._git_current_hash(True, 'abc123')
- expected = 'foo_tag'
- result = self._repo._current_ref(os.getcwd())
- self.assertEqual(result, expected)
-
- def test_ref_detached_hash(self):
- """Test that we can identify ref is detached at a hash
-
- """
- self._repo._git_current_branch = self._git_current_branch(False, '')
- self._repo._git_current_tag = self._git_current_tag(False, '')
- self._repo._git_current_hash = self._git_current_hash(True, 'abc123')
- expected = 'abc123'
- result = self._repo._current_ref(os.getcwd())
- self.assertEqual(result, expected)
-
- def test_ref_none(self):
- """Test that we correctly identify that we're not in a git repo.
- """
- self._repo._git_current_branch = self._git_current_branch(False, '')
- self._repo._git_current_tag = self._git_current_tag(False, '')
- self._repo._git_current_hash = self._git_current_hash(False, '')
- result = self._repo._current_ref(os.getcwd())
- self.assertEqual(result, EMPTY_STR)
-
-
-class TestGitRepositoryCheckSync(unittest.TestCase):
- """Test whether the GitRepository _check_sync_logic functionality is
- correct.
-
- Note: there are a lot of combinations of state:
-
- - external description - tag, branch
-
- - working copy
- - doesn't exist (not checked out)
- - exists, no git info - incorrect protocol, e.g. svn, or tarball?
- - exists, git info
- - as expected:
- - different from expected:
- - detached tag,
- - detached hash,
- - detached branch (compare remote and branch),
- - tracking branch (compare remote and branch),
- - same remote
- - different remote
- - untracked branch
-
- Test list:
- - doesn't exist
- - exists no git info
-
- - num_external * (working copy expected + num_working copy different)
- - total tests = 16
-
- """
-
- # NOTE(bja, 2017-11) pylint complains about long method names, but
- # it is hard to differentiate tests without making them more
- # cryptic. Also complains about too many public methods, but it
- # doesn't really make sense to break this up.
- # pylint: disable=invalid-name,too-many-public-methods
-
- TMP_FAKE_DIR = 'fake'
- TMP_FAKE_GIT_DIR = os.path.join(TMP_FAKE_DIR, '.git')
-
- def setUp(self):
- """Setup reusable git repository object
- """
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- '/path/to/local/repo',
- ExternalsDescription.TAG: 'tag1',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: self.TMP_FAKE_DIR,
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = GitRepository('test', repo)
- # The unit tests here don't care about the result of
- # _current_ref, but we replace it here so that we don't need to
- # worry about calling a possibly slow and possibly
- # error-producing command (since _current_ref calls various git
- # functions):
- self._repo._current_ref = self._current_ref_empty
- self._create_tmp_git_dir()
-
- # We have to override this class method rather than the self._repo
- # instance method because it is called via
- # GitRepository._remote_name_for_url, which is itself a @classmethod
- # that calls cls._git_remote_verbose().
- self._orignal_git_remote_verbose = GitRepository._git_remote_verbose
- GitRepository._git_remote_verbose = self._git_remote_origin_upstream
-
- def tearDown(self):
- """Cleanup tmp stuff on the file system
- """
- self._remove_tmp_git_dir()
-
- GitRepository._git_remote_verbose = self._orignal_git_remote_verbose
-
- def _create_tmp_git_dir(self):
- """Create a temporary fake git directory for testing purposes.
- """
- if not os.path.exists(self.TMP_FAKE_GIT_DIR):
- os.makedirs(self.TMP_FAKE_GIT_DIR)
-
- def _remove_tmp_git_dir(self):
- """Remove the temporary fake git directory
- """
- if os.path.exists(self.TMP_FAKE_DIR):
- shutil.rmtree(self.TMP_FAKE_DIR)
-
- #
- # mock methods replacing git system calls
- #
- @staticmethod
- def _current_ref_empty(dirname):
- """Return an empty string.
-
- Drop-in for GitRepository._current_ref
- """
- return EMPTY_STR
-
- @staticmethod
- def _git_remote_origin_upstream(dirname):
- """Return an info string that is a checkout hash.
-
- Drop-in for GitRepository._git_remote_verbose.
- """
- return GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM
-
- @staticmethod
- def _git_current_hash(myhash):
- """Return a function that takes the place of repo._git_current_hash,
- which returns the given hash
- """
- def my_git_current_hash(dirname):
- """mock function that can take the place of repo._git_current_hash"""
- return 0, myhash
- return my_git_current_hash
-
- def _git_revparse_commit(self, expected_ref, mystatus, myhash):
- """Return a function that takes the place of
- repo._git_revparse_commit, which returns a tuple:
- (mystatus, myhash).
-
- Expects the passed-in ref to equal expected_ref
-
- status = 0 implies success, non-zero implies failure
- """
- def my_git_revparse_commit(ref, dirname):
- """mock function that can take the place of repo._git_revparse_commit"""
- self.assertEqual(expected_ref, ref)
- return mystatus, myhash
- return my_git_revparse_commit
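-
- # Illustrative use (mirroring test_sync_tag_on_same_hash below): stub
- # both queries to agree on a hash and _check_sync_logic reports ok:
- # self._repo._git_current_hash = self._git_current_hash('abc123')
- # self._repo._git_revparse_commit = self._git_revparse_commit(
- # 'tag1', 0, 'abc123')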
-
- # ----------------------------------------------------------------
- #
- # Tests where working copy doesn't exist or is invalid
- #
- # ----------------------------------------------------------------
- def test_sync_dir_not_exist(self):
- """Test that a directory that doesn't exist returns an error status
-
- Note: the Repository classes should be prevented from ever
- working on an empty directory by the _Source object.
-
- """
- stat = ExternalStatus()
- self._repo._check_sync(stat, 'invalid_directory_name')
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR)
- # check_dir should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_dir_exist_no_git_info(self):
- """Test that a non-existent git repo returns an unknown status
- """
- stat = ExternalStatus()
- self._repo._tag = 'tag1'
- self._repo._git_current_hash = self._git_current_hash('')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'tag1', 1, '')
- self._repo._check_sync(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- # ------------------------------------------------------------------------
- #
- # Tests where version in configuration file is not a valid reference
- #
- # ------------------------------------------------------------------------
-
- def test_sync_invalid_reference(self):
- """Test that an invalid reference returns out-of-sync
- """
- stat = ExternalStatus()
- self._repo._tag = 'tag1'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'tag1', 1, '')
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- # ----------------------------------------------------------------
- #
- # Tests where external description specifies a tag
- #
- # ----------------------------------------------------------------
- def test_sync_tag_on_same_hash(self):
- """Test expect tag on same hash --> status ok
-
- """
- stat = ExternalStatus()
- self._repo._tag = 'tag1'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'tag1', 0, 'abc123')
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_tag_on_different_hash(self):
- """Test expect tag on a different hash --> status modified
-
- """
- stat = ExternalStatus()
- self._repo._tag = 'tag1'
- self._repo._git_current_hash = self._git_current_hash('def456')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'tag1', 0, 'abc123')
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- # ----------------------------------------------------------------
- #
- # Tests where external description specifies a hash
- #
- # ----------------------------------------------------------------
- def test_sync_hash_on_same_hash(self):
- """Test expect hash on same hash --> status ok
-
- """
- stat = ExternalStatus()
- self._repo._tag = ''
- self._repo._hash = 'abc'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'abc', 0, 'abc123')
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_hash_on_different_hash(self):
- """Test expect hash on a different hash --> status modified
-
- """
- stat = ExternalStatus()
- self._repo._tag = ''
- self._repo._hash = 'abc'
- self._repo._git_current_hash = self._git_current_hash('def456')
- self._repo._git_revparse_commit = self._git_revparse_commit(
- 'abc', 0, 'abc123')
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- # ----------------------------------------------------------------
- #
- # Tests where external description specifies a branch
- #
- # ----------------------------------------------------------------
- def test_sync_branch_on_same_hash(self):
- """Test expect branch on same hash --> status ok
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature-2'
- self._repo._tag = ''
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('origin/feature-2', 0, 'abc123'))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_branch_on_diff_hash(self):
- """Test expect branch on diff hash --> status modified
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature-2'
- self._repo._tag = ''
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('origin/feature-2', 0, 'def456'))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_branch_diff_remote(self):
- """Test _remote_name_for_url with a different remote
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature-2'
- self._repo._tag = ''
- self._repo._url = '/path/to/other/repo'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('upstream/feature-2', 0, 'def456'))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- # The test passes if _git_revparse_commit is called with the
- # expected argument
-
- def test_sync_branch_diff_remote2(self):
- """Test _remote_name_for_url with a different remote
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature-2'
- self._repo._tag = ''
- self._repo._url = '/path/to/local/repo2'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('other/feature-2', 0, 'def789'))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- # The test passes if _git_revparse_commit is called with the
- # expected argument
-
- def test_sync_branch_on_unknown_remote(self):
- """Test expect branch, but remote is unknown --> status modified
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature-2'
- self._repo._tag = ''
- self._repo._url = '/path/to/unknown/repo'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('unknown_remote/feature-2', 1, ''))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_sync_branch_on_untracked_local(self):
- """Test expect branch, on untracked branch in local repo --> status ok
-
- Setting the externals description to '.' indicates that the
- user only wants to consider the current local repo state
- without fetching from remotes. This is required to preserve
- the current branch of a repository during an update.
-
- """
- stat = ExternalStatus()
- self._repo._branch = 'feature3'
- self._repo._tag = ''
- self._repo._url = '.'
- self._repo._git_current_hash = self._git_current_hash('abc123')
- self._repo._git_revparse_commit = (
- self._git_revparse_commit('feature3', 0, 'abc123'))
- self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
- # check_sync should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
-
-class TestGitStatusPorcelain(unittest.TestCase):
- """Test parsing of output from git status --porcelain=v1 -z
- """
- # pylint: disable=C0103
- GIT_STATUS_PORCELAIN_V1_ALL = (
- r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0'
- r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0'
- r'?? junk.txt')
-
- GIT_STATUS_PORCELAIN_CLEAN = r''
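-
- # Note: the raw strings above spell the NUL separators from
- # 'git status --porcelain=v1 -z' as literal backslash-zero escapes; a
- # real call would emit actual NUL bytes between entries.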
-
- def test_porcelain_status_dirty(self):
- """Verify that git status output is considered dirty when there are
- listed files.
-
- """
- git_output = self.GIT_STATUS_PORCELAIN_V1_ALL
- is_dirty = GitRepository._status_v1z_is_dirty(git_output)
- self.assertTrue(is_dirty)
-
- def test_porcelain_status_clean(self):
- """Verify that git status output is considered clean when there are no
- listed files.
-
- """
- git_output = self.GIT_STATUS_PORCELAIN_CLEAN
- is_dirty = GitRepository._status_v1z_is_dirty(git_output)
- self.assertFalse(is_dirty)
-
-
-class TestGitCreateRemoteName(unittest.TestCase):
- """Test the create_remote_name method on the GitRepository class
- """
-
- def setUp(self):
- """Common infrastructure for testing _create_remote_name
- """
- self._rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- 'empty',
- ExternalsDescription.TAG:
- 'very_useful_tag',
- ExternalsDescription.BRANCH: EMPTY_STR,
- ExternalsDescription.HASH: EMPTY_STR,
- ExternalsDescription.SPARSE: EMPTY_STR, }
- self._repo = GitRepository('test', self._rdata)
-
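- # The four tests below imply the naming rule used by
- # _create_remote_name: after any expansion, take the last two
- # components of the url path and join them with '_' (e.g.
- # 'very_nice_org/useful_repo' -> 'very_nice_org_useful_repo').
-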
- def test_remote_git_proto(self):
- """Test remote with git protocol
- """
- self._repo._url = 'git@git.github.com:very_nice_org/useful_repo'
- remote_name = self._repo._create_remote_name()
- self.assertEqual(remote_name, 'very_nice_org_useful_repo')
-
- def test_remote_https_proto(self):
- """Test remote with git protocol
- """
- self._repo._url = 'https://www.github.com/very_nice_org/useful_repo'
- remote_name = self._repo._create_remote_name()
- self.assertEqual(remote_name, 'very_nice_org_useful_repo')
-
- def test_remote_local_abs(self):
- """Test remote with git protocol
- """
- self._repo._url = '/path/to/local/repositories/useful_repo'
- remote_name = self._repo._create_remote_name()
- self.assertEqual(remote_name, 'repositories_useful_repo')
-
- def test_remote_local_rel(self):
- """Test remote with git protocol
- """
- os.environ['TEST_VAR'] = '/my/path/to/repos'
- self._repo._url = '${TEST_VAR}/../../useful_repo'
- remote_name = self._repo._create_remote_name()
- self.assertEqual(remote_name, 'path_useful_repo')
- del os.environ['TEST_VAR']
-
-
-class TestVerifyTag(unittest.TestCase):
- """Test logic verifying that a tag exists and is unique
-
- """
-
- def setUp(self):
- """Setup reusable git repository object
- """
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- '/path/to/local/repo',
- ExternalsDescription.TAG: 'tag1',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'tmp',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = GitRepository('test', repo)
-
- @staticmethod
- def _shell_true(*args, **kwargs):
- return 0
-
- @staticmethod
- def _shell_false(*args, **kwargs):
- return 1
-
- @staticmethod
- def _mock_revparse_commit(ref, dirname):
- _ = ref
- return (0, '97ebc0e0deadc0de')
-
- @staticmethod
- def _mock_revparse_commit_false(ref, dirname):
- _ = ref
- return (1, '97ebc0e0deadc0de')
-
- def test_tag_not_tag_branch_commit(self):
- """Verify a non-tag returns false
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit_false
- self._repo._tag = 'something'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertFalse(received)
-
- def test_tag_not_tag(self):
- """Verify a non-tag, untracked remote returns false
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_true
- self._repo._git_lsremote_branch = self._shell_true
- self._repo._git_revparse_commit = self._mock_revparse_commit_false
- self._repo._tag = 'tag1'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertFalse(received)
-
- def test_tag_indeterminant(self):
- """Verify an indeterminant tag/branch returns false
- """
- self._repo._git_showref_tag = self._shell_true
- self._repo._git_showref_branch = self._shell_true
- self._repo._git_lsremote_branch = self._shell_true
- self._repo._git_revparse_commit = self._mock_revparse_commit
- self._repo._tag = 'something'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertFalse(received)
-
- def test_tag_is_unique(self):
- """Verify a unique tag match returns true
- """
- self._repo._git_showref_tag = self._shell_true
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit
- self._repo._tag = 'tag1'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertTrue(received)
-
- def test_tag_is_not_hash(self):
- """Verify a commit hash is not classified as a tag
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit
- self._repo._tag = '97ebc0e0'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertFalse(received)
-
- def test_hash_is_commit(self):
- """Verify a commit hash is not classified as a tag
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit
- self._repo._tag = '97ebc0e0'
- remote_name = 'origin'
- received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
- os.getcwd())
- self.assertFalse(received)
-
-
-class TestValidRef(unittest.TestCase):
- """Test logic verifying that a reference is a valid tag, branch or sha1
-
- """
-
- def setUp(self):
- """Setup reusable git repository object
- """
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'git',
- ExternalsDescription.REPO_URL:
- '/path/to/local/repo',
- ExternalsDescription.TAG: 'tag1',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'tmp',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = GitRepository('test', repo)
-
- @staticmethod
- def _shell_true(url, remote=None):
- _ = url
- _ = remote
- return 0
-
- @staticmethod
- def _shell_false(url, remote=None):
- _ = url
- _ = remote
- return 1
-
- @staticmethod
- def _mock_revparse_commit_false(ref, dirname):
- _ = ref
- return (1, '')
-
- @staticmethod
- def _mock_revparse_commit_true(ref, dirname):
- _ = ref
- _ = dirname
- return (0, '')
-
- def test_valid_ref_is_invalid(self):
- """Verify an invalid reference raises an exception
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit_false
- self._repo._tag = 'invalid_ref'
- with self.assertRaises(RuntimeError):
- self._repo._check_for_valid_ref(self._repo._tag,
- remote_name=None,
- dirname=os.getcwd())
-
- def test_valid_tag(self):
- """Verify a valid tag return true
- """
- self._repo._git_showref_tag = self._shell_true
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit_true
- self._repo._tag = 'tag1'
- received = self._repo._check_for_valid_ref(self._repo._tag,
- remote_name=None,
- dirname=os.getcwd())
- self.assertTrue(received)
-
- def test_valid_branch(self):
- """Verify a valid tag return true
- """
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_true
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = self._mock_revparse_commit_true
- self._repo._tag = 'tag1'
- received = self._repo._check_for_valid_ref(self._repo._tag,
- remote_name=None,
- dirname=os.getcwd())
- self.assertTrue(received)
-
- def test_valid_hash(self):
- """Verify a valid hash return true
- """
- def _mock_revparse_commit_true(ref, dirname):
- _ = ref
- return (0, '56cc0b539426eb26810af9e')
-
- self._repo._git_showref_tag = self._shell_false
- self._repo._git_showref_branch = self._shell_false
- self._repo._git_lsremote_branch = self._shell_false
- self._repo._git_revparse_commit = _mock_revparse_commit_true
- self._repo._hash = '56cc0b5394'
- received = self._repo._check_for_valid_ref(self._repo._hash,
- remote_name=None,
- dirname=os.getcwd())
- self.assertTrue(received)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_repository_svn.py b/manage_externals/test/test_unit_repository_svn.py
deleted file mode 100755
index d9309df7f6..0000000000
--- a/manage_externals/test/test_unit_repository_svn.py
+++ /dev/null
@@ -1,501 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.repository_svn import SvnRepository
-from manic.externals_status import ExternalStatus
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import ExternalsDescriptionDict
-from manic.global_constants import EMPTY_STR
-
-# pylint: disable=W0212
-
-SVN_INFO_MOSART = """Path: components/mosart
-Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/mosart
-URL: https://svn-ccsm-models.cgd.ucar.edu/mosart/trunk_tags/mosart1_0_26
-Relative URL: ^/mosart/trunk_tags/mosart1_0_26
-Repository Root: https://svn-ccsm-models.cgd.ucar.edu
-Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
-Revision: 86711
-Node Kind: directory
-Schedule: normal
-Last Changed Author: erik
-Last Changed Rev: 86031
-Last Changed Date: 2017-07-07 12:28:10 -0600 (Fri, 07 Jul 2017)
-"""
-SVN_INFO_CISM = """
-Path: components/cism
-Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/cism
-URL: https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_37
-Relative URL: ^/glc/trunk_tags/cism2_1_37
-Repository Root: https://svn-ccsm-models.cgd.ucar.edu
-Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
-Revision: 86711
-Node Kind: directory
-Schedule: normal
-Last Changed Author: sacks
-Last Changed Rev: 85704
-Last Changed Date: 2017-06-15 05:59:28 -0600 (Thu, 15 Jun 2017)
-"""
-
-
-class TestSvnRepositoryCheckURL(unittest.TestCase):
- """Verify that the svn_check_url function is working as expected.
- """
-
- def setUp(self):
- """Setup reusable svn repository object
- """
- self._name = 'component'
- rdata = {ExternalsDescription.PROTOCOL: 'svn',
- ExternalsDescription.REPO_URL:
- 'https://svn-ccsm-models.cgd.ucar.edu',
- ExternalsDescription.TAG:
- 'mosart/trunk_tags/mosart1_0_26',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'junk',
- ExternalsDescription.EXTERNALS: '',
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = SvnRepository('test', repo)
-
- def test_check_url_same(self):
- """Test that we correctly identify that the correct URL.
- """
- svn_output = SVN_INFO_MOSART
- expected_url = self._repo.url()
- result, current_version = \
- self._repo._check_url(svn_output, expected_url)
- self.assertEqual(result, ExternalStatus.STATUS_OK)
- self.assertEqual(current_version, 'mosart/trunk_tags/mosart1_0_26')
-
- def test_check_url_different(self):
- """Test that we correctly reject an incorrect URL.
- """
- svn_output = SVN_INFO_CISM
- expected_url = self._repo.url()
- result, current_version = \
- self._repo._check_url(svn_output, expected_url)
- self.assertEqual(result, ExternalStatus.MODEL_MODIFIED)
- self.assertEqual(current_version, 'glc/trunk_tags/cism2_1_37')
-
- def test_check_url_none(self):
- """Test that we can handle an empty string for output, e.g. not an svn
- repo.
-
- """
- svn_output = EMPTY_STR
- expected_url = self._repo.url()
- result, current_version = \
- self._repo._check_url(svn_output, expected_url)
- self.assertEqual(result, ExternalStatus.UNKNOWN)
- self.assertEqual(current_version, '')
-
-
-class TestSvnRepositoryCheckSync(unittest.TestCase):
- """Test whether the SvnRepository svn_check_sync functionality is
- correct.
-
- """
-
- def setUp(self):
- """Setup reusable svn repository object
- """
- self._name = "component"
- rdata = {ExternalsDescription.PROTOCOL: 'svn',
- ExternalsDescription.REPO_URL:
- 'https://svn-ccsm-models.cgd.ucar.edu/',
- ExternalsDescription.TAG:
- 'mosart/trunk_tags/mosart1_0_26',
- }
-
- data = {self._name:
- {
- ExternalsDescription.REQUIRED: False,
- ExternalsDescription.PATH: 'junk',
- ExternalsDescription.EXTERNALS: EMPTY_STR,
- ExternalsDescription.REPO: rdata,
- },
- }
-
- model = ExternalsDescriptionDict(data)
- repo = model[self._name][ExternalsDescription.REPO]
- self._repo = SvnRepository('test', repo)
-
- @staticmethod
- def _svn_info_empty(*_):
- """Return an empty info string. Simulates svn info failing.
- """
- return ''
-
- @staticmethod
- def _svn_info_synced(*_):
- """Return an info sting that is synced with the setUp data
- """
- return SVN_INFO_MOSART
-
- @staticmethod
- def _svn_info_modified(*_):
- """Return and info string that is modified from the setUp data
- """
- return SVN_INFO_CISM
-
- def test_repo_dir_not_exist(self):
- """Test that a directory that doesn't exist returns an error status
-
- Note: the Repository classes should be prevented from ever
- working on an empty directory by the _Source object.
-
- """
- stat = ExternalStatus()
- self._repo._check_sync(stat, 'junk')
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR)
- # check_dir should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_repo_dir_exist_no_svn_info(self):
- """Test that an empty info string returns an unknown status
- """
- stat = ExternalStatus()
- # Now we over-ride the _svn_info method on the repo to return
- # a known value without requiring access to svn.
- self._repo._svn_info = self._svn_info_empty
- self._repo._check_sync(stat, '.')
- self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN)
- # check_dir should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_repo_dir_synced(self):
- """Test that a valid info string that is synced to the repo in the
- externals description returns an ok status.
-
- """
- stat = ExternalStatus()
- # Now we over-ride the _svn_info method on the repo to return
- # a known value without requiring access to svn.
- self._repo._svn_info = self._svn_info_synced
- self._repo._check_sync(stat, '.')
- self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
- # check_dir should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
- def test_repo_dir_modified(self):
- """Test that a valid svn info string that is out of sync with the
- externals description returns a modified status.
-
- """
- stat = ExternalStatus()
- # Now we over-ride the _svn_info method on the repo to return
- # a known value without requiring access to svn.
- self._repo._svn_info = self._svn_info_modified
- self._repo._check_sync(stat, '.')
- self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
- # check_dir should only modify the sync_state, not clean_state
- self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
-
-class TestSVNStatusXML(unittest.TestCase):
- """Test parsing of svn status xml output
- """
- # NOTE: the XML bodies below are representative 'svn status --xml'
- # output; entry paths and revision numbers are illustrative.
- SVN_STATUS_XML_DIRTY_ALL = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="missing_file.txt">
-<wc-status props="none" item="missing" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2017-06-15T11:59:00.355419Z</date>
-</commit>
-</wc-status>
-</entry>
-<entry path="modified_file.txt">
-<wc-status props="none" item="modified" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2013-02-07T16:17:56.412878Z</date>
-</commit>
-</wc-status>
-</entry>
-<entry path="deleted_file.txt">
-<wc-status props="none" item="deleted" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2017-05-01T16:48:27.893741Z</date>
-</commit>
-</wc-status>
-</entry>
-<entry path="unversioned_file.txt">
-<wc-status props="none" item="unversioned">
-</wc-status>
-</entry>
-<entry path="added_file.txt">
-<wc-status props="none" item="added" revision="-1">
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_DIRTY_MISSING = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="missing_file.txt">
-<wc-status props="none" item="missing" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2017-06-15T11:59:00.355419Z</date>
-</commit>
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_DIRTY_MODIFIED = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="modified_file.txt">
-<wc-status props="none" item="modified" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2013-02-07T16:17:56.412878Z</date>
-</commit>
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_DIRTY_DELETED = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="deleted_file.txt">
-<wc-status props="none" item="deleted" revision="86711">
-<commit revision="85704">
-<author>sacks</author>
-<date>2017-05-01T16:48:27.893741Z</date>
-</commit>
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_DIRTY_UNVERSION = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="unversioned_file.txt">
-<wc-status props="none" item="unversioned">
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_DIRTY_ADDED = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="added_file.txt">
-<wc-status props="none" item="added" revision="-1">
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- SVN_STATUS_XML_CLEAN = '''<?xml version="1.0" encoding="UTF-8"?>
-<status>
-<target path=".">
-<entry path="unversioned_file.txt">
-<wc-status props="none" item="unversioned">
-</wc-status>
-</entry>
-<entry path="external_dir">
-<wc-status props="none" item="external">
-</wc-status>
-</entry>
-</target>
-</status>
-'''
-
- def test_xml_status_dirty_missing(self):
- """Verify that svn status output is consindered dirty when there is a
- missing file.
-
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_MISSING
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertTrue(is_dirty)
-
- def test_xml_status_dirty_modified(self):
- """Verify that svn status output is consindered dirty when there is a
- modified file.
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_MODIFIED
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertTrue(is_dirty)
-
- def test_xml_status_dirty_deleted(self):
- """Verify that svn status output is consindered dirty when there is a
- deleted file.
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_DELETED
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertTrue(is_dirty)
-
- def test_xml_status_dirty_unversion(self):
- """Verify that svn status output ignores unversioned files when making
- the clean/dirty decision.
-
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertFalse(is_dirty)
-
- def test_xml_status_dirty_added(self):
- """Verify that svn status output is consindered dirty when there is a
- added file.
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_ADDED
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertTrue(is_dirty)
-
- def test_xml_status_dirty_all(self):
- """Verify that svn status output is consindered dirty when there are
- multiple dirty files..
-
- """
- svn_output = self.SVN_STATUS_XML_DIRTY_ALL
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertTrue(is_dirty)
-
- def test_xml_status_dirty_clean(self):
- """Verify that svn status output is consindered clean when there are
- no 'dirty' files. This means accepting untracked and externals.
-
- """
- svn_output = self.SVN_STATUS_XML_CLEAN
- is_dirty = SvnRepository.xml_status_is_dirty(
- svn_output)
- self.assertFalse(is_dirty)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/manage_externals/test/test_unit_utils.py b/manage_externals/test/test_unit_utils.py
deleted file mode 100644
index 80e1636649..0000000000
--- a/manage_externals/test/test_unit_utils.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import unittest
-
-from manic.utils import last_n_lines, indent_string
-from manic.utils import str_to_bool, execute_subprocess
-from manic.utils import is_remote_url, split_remote_url, expand_local_url
-
-
-class TestExecuteSubprocess(unittest.TestCase):
- """Test the application logic of execute_subprocess wrapper
- """
-
- def test_exesub_return_stat_err(self):
- """Test that execute_subprocess returns a status code when caller
- requests and the executed subprocess fails.
-
- """
- cmd = ['false']
- status = execute_subprocess(cmd, status_to_caller=True)
- self.assertEqual(status, 1)
-
- def test_exesub_return_stat_ok(self):
- """Test that execute_subprocess returns a status code when caller
- requests and the executed subprocess succeeds.
-
- """
- cmd = ['true']
- status = execute_subprocess(cmd, status_to_caller=True)
- self.assertEqual(status, 0)
-
- def test_exesub_except_stat_err(self):
- """Test that execute_subprocess raises an exception on error when
- caller doesn't request return code
-
- """
- cmd = ['false']
- with self.assertRaises(RuntimeError):
- execute_subprocess(cmd, status_to_caller=False)
-
-
-class TestLastNLines(unittest.TestCase):
- """Test the last_n_lines function.
-
- """
-
- def test_last_n_lines_short(self):
- """With a message with <= n lines, result of last_n_lines should
- just be the original message.
-
- """
- mystr = """three
-line
-string
-"""
-
- mystr_truncated = last_n_lines(
- mystr, 3, truncation_message='[truncated]')
- self.assertEqual(mystr, mystr_truncated)
-
- def test_last_n_lines_long(self):
- """With a message with > n lines, result of last_n_lines should
- be a truncated string.
-
- """
- mystr = """a
-big
-five
-line
-string
-"""
- expected = """[truncated]
-five
-line
-string
-"""
-
- mystr_truncated = last_n_lines(
- mystr, 3, truncation_message='[truncated]')
- self.assertEqual(expected, mystr_truncated)
-
-
-class TestIndentStr(unittest.TestCase):
- """Test the indent_string function.
-
- """
-
- def test_indent_string_singleline(self):
- """Test the indent_string function with a single-line string
-
- """
- mystr = 'foo'
- result = indent_string(mystr, 4)
- expected = ' foo'
- self.assertEqual(expected, result)
-
- def test_indent_string_multiline(self):
- """Test the indent_string function with a multi-line string
-
- """
- mystr = """hello
-hi
-goodbye
-"""
- result = indent_string(mystr, 2)
- expected = """ hello
- hi
- goodbye
-"""
- self.assertEqual(expected, result)
-
-
-class TestStrToBool(unittest.TestCase):
- """Test the string to boolean conversion routine.
-
- """
-
- def test_case_insensitive_true(self):
- """Verify that case insensitive variants of 'true' returns the True
- boolean.
-
- """
- values = ['true', 'TRUE', 'True', 'tRuE', 't', 'T', ]
- for value in values:
- received = str_to_bool(value)
- self.assertTrue(received)
-
- def test_case_insensitive_false(self):
- """Verify that case insensitive variants of 'false' returns the False
- boolean.
-
- """
- values = ['false', 'FALSE', 'False', 'fAlSe', 'f', 'F', ]
- for value in values:
- received = str_to_bool(value)
- self.assertFalse(received)
-
- def test_invalid_str_error(self):
- """Verify that a non-true/false string generates a runtime error.
- """
- values = ['not_true_or_false', 'A', '1', '0',
- 'false_is_not_true', 'true_is_not_false']
- for value in values:
- with self.assertRaises(RuntimeError):
- str_to_bool(value)
-
-
-class TestIsRemoteURL(unittest.TestCase):
- """Crude url checking to determine if a url is local or remote.
-
- """
-
- def test_url_remote_git(self):
- """verify that a remote git url is identified.
- """
- url = 'git@somewhere'
- is_remote = is_remote_url(url)
- self.assertTrue(is_remote)
-
- def test_url_remote_ssh(self):
- """verify that a remote ssh url is identified.
- """
- url = 'ssh://user@somewhere'
- is_remote = is_remote_url(url)
- self.assertTrue(is_remote)
-
- def test_url_remote_http(self):
- """verify that a remote http url is identified.
- """
- url = 'http://somewhere'
- is_remote = is_remote_url(url)
- self.assertTrue(is_remote)
-
- def test_url_remote_https(self):
- """verify that a remote https url is identified.
- """
- url = 'https://somewhere'
- is_remote = is_remote_url(url)
- self.assertTrue(is_remote)
-
- def test_url_local_user(self):
- """verify that a local path with '~/path/to/repo' gets rejected
-
- """
- url = '~/path/to/repo'
- is_remote = is_remote_url(url)
- self.assertFalse(is_remote)
-
- def test_url_local_var_curly(self):
- """verify that a local path with env var '${HOME}' gets rejected
- """
- url = '${HOME}/path/to/repo'
- is_remote = is_remote_url(url)
- self.assertFalse(is_remote)
-
- def test_url_local_var(self):
- """verify that a local path with an env var '$HOME' gets rejected
- """
- url = '$HOME/path/to/repo'
- is_remote = is_remote_url(url)
- self.assertFalse(is_remote)
-
- def test_url_local_abs(self):
- """verify that a local abs path gets rejected
- """
- url = '/path/to/repo'
- is_remote = is_remote_url(url)
- self.assertFalse(is_remote)
-
- def test_url_local_rel(self):
- """verify that a local relative path gets rejected
- """
- url = '../../path/to/repo'
- is_remote = is_remote_url(url)
- self.assertFalse(is_remote)
-
-
-class TestSplitRemoteURL(unittest.TestCase):
- """Crude url checking to determine if a url is local or remote.
-
- """
-
- def test_url_remote_git(self):
- """verify that a remote git url is identified.
- """
- url = 'git@somewhere.com:org/repo'
- received = split_remote_url(url)
- self.assertEqual(received, "org/repo")
-
- def test_url_remote_ssh(self):
- """verify that a remote ssh url is identified.
- """
- url = 'ssh://user@somewhere.com/path/to/repo'
- received = split_remote_url(url)
- self.assertEqual(received, 'somewhere.com/path/to/repo')
-
- def test_url_remote_http(self):
- """verify that a remote http url is identified.
- """
- url = 'http://somewhere.org/path/to/repo'
- received = split_remote_url(url)
- self.assertEqual(received, 'somewhere.org/path/to/repo')
-
-    def test_url_remote_https(self):
-        """verify that a remote https url is split correctly.
-        """
-        url = 'https://somewhere.gov/path/to/repo'
- received = split_remote_url(url)
- self.assertEqual(received, 'somewhere.gov/path/to/repo')
-
- def test_url_local_url_unchanged(self):
- """verify that a local path is unchanged
-
- """
- url = '/path/to/repo'
- received = split_remote_url(url)
- self.assertEqual(received, url)
-
-
-class TestExpandLocalURL(unittest.TestCase):
- """Crude url checking to determine if a url is local or remote.
-
- Remote should be unmodified.
-
- Local, should perform user and variable expansion.
-
- """
-
- def test_url_local_user1(self):
- """verify that a local path with '~/path/to/repo' gets expanded to an
- absolute path.
-
- NOTE(bja, 2017-11) we can't test for something like:
- '~user/path/to/repo' because the user has to be in the local
-        machine's password database and we don't know a user name that
-        is valid on every system.
-
- """
- field = 'test'
- url = '~/path/to/repo'
- received = expand_local_url(url, field)
- print(received)
- self.assertTrue(os.path.isabs(received))
-
- def test_url_local_expand_curly(self):
- """verify that a local path with '${HOME}' gets expanded to an absolute path.
- """
- field = 'test'
- url = '${HOME}/path/to/repo'
- received = expand_local_url(url, field)
- self.assertTrue(os.path.isabs(received))
-
- def test_url_local_expand_var(self):
- """verify that a local path with '$HOME' gets expanded to an absolute path.
- """
- field = 'test'
- url = '$HOME/path/to/repo'
- received = expand_local_url(url, field)
- self.assertTrue(os.path.isabs(received))
-
- def test_url_local_env_missing(self):
- """verify that a local path with env var that is missing gets left as-is
-
- """
- field = 'test'
- url = '$TMP_VAR/path/to/repo'
- received = expand_local_url(url, field)
- print(received)
- self.assertEqual(received, url)
-
- def test_url_local_expand_env(self):
- """verify that a local path with another env var gets expanded to an
- absolute path.
-
- """
- field = 'test'
- os.environ['TMP_VAR'] = '/some/absolute'
- url = '$TMP_VAR/path/to/repo'
- received = expand_local_url(url, field)
- del os.environ['TMP_VAR']
- print(received)
- self.assertTrue(os.path.isabs(received))
- self.assertEqual(received, '/some/absolute/path/to/repo')
-
- def test_url_local_normalize_rel(self):
- """verify that a local path with another env var gets expanded to an
- absolute path.
-
- """
- field = 'test'
- url = '/this/is/a/long/../path/to/a/repo'
- received = expand_local_url(url, field)
- print(received)
- self.assertEqual(received, '/this/is/a/path/to/a/repo')
-
-
-if __name__ == '__main__':
- unittest.main()
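Finally, sketches of the three url helpers named in the import at the top of this deleted file. These too are reconstructions inferred from the assertions, not the actual manic.utils implementations; in particular, the 'field' argument of expand_local_url is unused here (the real code presumably uses it to name the offending config entry in error messages):

    import os

    def is_remote_url(url):
        # Crude check: anything with a git/ssh/http(s) prefix is remote.
        return any(url.startswith(prefix)
                   for prefix in ('git@', 'ssh://', 'http://', 'https://'))

    def split_remote_url(url):
        # Strip the protocol and user info from a remote url;
        # local paths are returned unchanged.
        if not is_remote_url(url):
            return url
        if url.startswith('git@'):
            return url.split(':', 1)[1]   # git@host:org/repo -> org/repo
        url = url.split('://', 1)[1]      # drop ssh:// or http(s)://
        if '@' in url:
            url = url.split('@', 1)[1]    # drop user@
        return url

    def expand_local_url(url, field):
        # Expand '~' and environment variables, then normalize away
        # relative components such as '..'; unset variables are left as-is.
        return os.path.normpath(os.path.expandvars(os.path.expanduser(url)))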
diff --git a/py_env_create b/py_env_create
index 4b3612cfda..ac705edb3c 100755
--- a/py_env_create
+++ b/py_env_create
@@ -20,6 +20,7 @@ if [ $error != 0 ]; then
echo "For notes on installing on a user system see: https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html"
echo "Error code was $error"
cat condahelp.txt
+ rm condahelp.txt
exit -1
fi
rm condahelp.txt
@@ -44,7 +45,7 @@ usage() {
echo "[-v|--verbose] "
echo " Run with verbose mode for the install so you see the progress bar"
echo "[-f|--file ] "
- echo " Conda environment file to use (can be a text format or YAML format)"
+      echo "  Conda environment requirements file to use (text format), in addition to the standard requirements files"
echo " Assumed to be under the directory: $condadir"
echo " Default is: $condafile"
echo "[--option