diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg
index 378992c777..866b9ac5b2 100644
--- a/Externals_CLM.cfg
+++ b/Externals_CLM.cfg
@@ -2,7 +2,7 @@
local_path = src/fates
protocol = git
repo_url = https://github.com/NGEET/fates
-tag = sci.1.72.2_api.34.0.0
+tag = sci.1.73.0_api.35.0.0
required = True
[externals_description]
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index eb34ac916f..fb44023cd5 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -4546,6 +4546,10 @@ sub setup_logic_fates {
if ( $nl->get_value('fates_spitfire_mode') > 0 ) {
$log->fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true');
}
+ # hydro isn't currently supported to work when FATES SP mode is active
+ if (&value_is_true( $nl->get_value('use_fates_planthydro') )) {
+ $log->fatal_error('fates sp mode is currently not supported to work with fates hydro');
+ }
}
}
my $var = "use_fates_inventory_init";
diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index ecced7a6e2..147a23f49a 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -480,7 +480,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-lnd/clm2/paramdata/fates_params_api.32.0.0_12pft_c231215.nc
+lnd/clm2/paramdata/fates_params_api.35.0.0_12pft_c240326.nc
diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl
index c8875090cd..7b654337af 100755
--- a/bld/unit_testers/build-namelist_test.pl
+++ b/bld/unit_testers/build-namelist_test.pl
@@ -163,7 +163,7 @@ sub cat_and_create_namelistinfile {
#
# Figure out number of tests that will run
#
-my $ntests = 2511;
+my $ntests = 2513;
if ( defined($opts{'compare'}) ) {
$ntests += 1545;
@@ -1023,6 +1023,16 @@ sub cat_and_create_namelistinfile {
GLC_TWO_WAY_COUPLING=>"FALSE",
phys=>"clm4_5",
},
+ "usespitfireusefatessp" =>{ options=>"-envxml_dir . --bgc fates",
+ namelst=>"fates_spitfire_mode=1,use_fates_sp=.true.",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
+ "usefatesspusefateshydro" =>{ options=>"-envxml_dir . --bgc fates",
+ namelst=>"use_fates_sp=.true.,use_fates_planthydro=.true.",
+ GLC_TWO_WAY_COUPLING=>"FALSE",
+ phys=>"clm5_0",
+ },
"useloggingButNOTFATES" =>{ options=>"-envxml_dir . -no-megan",
namelst=>"use_fates_logging=.true.",
GLC_TWO_WAY_COUPLING=>"FALSE",
diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml
index fe6a54c221..3365565a5e 100644
--- a/cime_config/testdefs/ExpectedTestFails.xml
+++ b/cime_config/testdefs/ExpectedTestFails.xml
@@ -58,13 +58,6 @@
-
-
- FAIL
- #1733
-
-
-
FAIL
@@ -197,13 +190,6 @@
-
-
- FAIL
- FATES#701
-
-
-
FAIL
@@ -233,7 +219,7 @@
-
+
FAIL
FATES#1089
@@ -247,31 +233,31 @@
-
-
+
+
FAIL
- #2325
+ #2423
-
+
FAIL
#2325
-
+
FAIL
#2325
-
+
FAIL
- #2478
+ #2325
diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml
index 05526b3529..c2a90507ba 100644
--- a/cime_config/testdefs/testlist_clm.xml
+++ b/cime_config/testdefs/testlist_clm.xml
@@ -2654,24 +2654,24 @@
-
+
-
+
-
+
-
+
@@ -3665,6 +3665,16 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 671f5020de..e5c2466085 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,121 @@
===============================================================
+Tag name: ctsm5.2.002
+Originator(s): glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov)
+Date: Fri 26 Apr 2024 11:13:46 AM MDT
+One-line Summary: FATES default allometry parameter file update
+
+Purpose and description of changes
+----------------------------------
+
+This updates the default FATES parameter file which includes a number
+of changes:
+
+ - Default global tree pft allometry update
+ - New allometric mode options
+ - New scaling coefficients for alternative leaf maintenance respiration
+ - New switch to control the use of host land model day length scaling factor
+
+This also incorporates some testing additions and clean up, including:
+
+ - Removes cheyenne expected failure tests that have been converted to derecho
+ - Adds a 5x5_amazon test to aux_clm and the expected failures list
+ - Temporarily converts a fates 5x5_amazon test to f10 test
+ - Adds namelist check and corresponding unit test to make sure fates hydro
+ and fates satellite phenology mode can not be used together
+
+The FATES externals tag is also updated which includes a number of bug fixes
+and the addition of new history output.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ NOTE: FATES hydro mode and FATES satellite phenology mode can not
+ be used in conjunction as of this API update
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ FATES parameter file default updated to fates_params_api.35.0.0_12pft_c240326.nc
+
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+ A 5x5_amazon smoke test, not using MPI-serial has been added to the test list
+ and list of expected failures to track issue #2423.
+ Out-of-date cheyenne tests on the expected failure list have been removed.
+
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho - PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- OK
+ izumi ------- OK
+
+ fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--)
+
+ derecho ----- OK
+ izumi ------- OK
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: Yes, only for FATES configurations
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: FATES
+ - what platforms/compilers: ALL
+ - nature of change (roundoff; larger than roundoff/same climate; new climate): larger than roundoff
+
+ The FATES externals update incorporates a number of bug fixes and the new allometry default
+ parameters result in a new scientific baseline.
+
+
+Other details
+-------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+ FATES: sci.1.72.2_api.34.0.0 -> sci.1.73.0_api.35.0.0
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+
+#2436 -- FATES API35 parameter file update
+NGEET#1093 -- Update default allometry parameters for tree PFTs
+NGEET#1128 -- New allometric modes
+NGEET#1149 -- Alternative vertical scaling of leaf maintenance respiration
+NGEET#1161 -- Adding day length factor switch
+
+===============================================================
+===============================================================
Tag name: ctsm5.2.001
Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
Date: Mon 22 Apr 2024 02:10:55 PM MDT
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 2451783ec2..40033a7d6f 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,6 @@
Tag Who Date Summary
============================================================================================================================
+ ctsm5.2.002 glemieux 04/26/2024 FATES default allometry parameter file update
ctsm5.2.001 erik 04/22/2024 Merge b4b-dev
ctsm5.2.0 many 04/20/2024 New mksurfdata_esmf tool to create new surface datasets that are in place
ctsm5.1.dev176 afoster 04/04/2024 Merge b4b-dev
diff --git a/python/ctsm/site_and_regional/plumber2_surf_wrapper.py b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py
new file mode 100755
index 0000000000..022914d17e
--- /dev/null
+++ b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py
@@ -0,0 +1,183 @@
+#! /usr/bin/env python3
+"""
+|------------------------------------------------------------------|
+|--------------------- Instructions -----------------------------|
+|------------------------------------------------------------------|
+This script is a simple wrapper for neon sites that performs the
+following:
+ 1) For neon sites, subset surface dataset from global dataset
+ (i.e. ./subset_data.py )
+ 2) Download neon and update the created surface dataset
+ based on the downloaded neon data.
+ (i.e. modify_singlept_site_neon.py)
+
+Instructions for running using conda python environments:
+
+../../py_env_create
+conda activate ctsm_py
+
+"""
+# Import libraries
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import subprocess
+import tqdm
+
+import pandas as pd
+
+
+def get_parser():
+ """
+ Get parser object for this script.
+ """
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ parser.print_usage = parser.print_help
+
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ help="Verbose mode will print more information. ",
+ action="store_true",
+ dest="verbose",
+ default=False,
+ )
+
+ parser.add_argument(
+ "--16pft",
+ help="Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ",
+ action="store_true",
+ dest="pft_16",
+ default=True,
+ )
+
+ return parser
+
+
+def execute(command):
+ """
+ Function for running a command on shell.
+ Args:
+ command (str):
+ command that we want to run.
+ Raises:
+ Error with the return code from shell.
+ """
+ print("\n", " >> ", *command, "\n")
+
+ try:
+ subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT)
+
+ except subprocess.CalledProcessError as err:
+ # raise RuntimeError("command '{}' return with error
+ # (code {}): {}".format(e.cmd, e.returncode, e.output))
+ # print (e.ouput)
+ print(err)
+
+
+def main():
+ """
+ Read plumber2_sites from csv, iterate through sites, and add dominant PFT
+ """
+
+ args = get_parser().parse_args()
+
+ if args.verbose:
+ logging.basicConfig(level=logging.DEBUG)
+
+ plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4)
+
+ for _, row in tqdm.tqdm(plumber2_sites.iterrows()):
+ lat = row["Lat"]
+ lon = row["Lon"]
+ site = row["Site"]
+ pft1 = row["pft1"]
+ pctpft1 = row["pft1-%"]
+ cth1 = row["pft1-cth"]
+ cbh1 = row["pft1-cbh"]
+ pft2 = row["pft2"]
+ pctpft2 = row["pft2-%"]
+ cth2 = row["pft2-cth"]
+ cbh2 = row["pft2-cbh"]
+ # overwrite missing values from .csv file
+ if pft1 == -999:
+ pft1 = 0
+ pctpft1 = 0
+ cth1 = 0
+ cbh1 = 0
+ if pft2 == -999:
+ pft2 = 0
+ pctpft2 = 0
+ cth2 = 0
+ cbh2 = 0
+ clmsite = "1x1_PLUMBER2_" + site
+ print("Now processing site :", site)
+
+ if args.pft_16:
+ # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT
+ # don't set crop flag
+ # set dominant pft
+ subset_command = [
+ "./subset_data",
+ "point",
+ "--lat",
+ str(lat),
+ "--lon",
+ str(lon),
+ "--site",
+ clmsite,
+ "--dompft",
+ str(pft1),
+ str(pft2),
+ "--pctpft",
+ str(pctpft1),
+ str(pctpft2),
+ "--cth",
+ str(cth1),
+ str(cth2),
+ "--cbh",
+ str(cbh1),
+ str(cbh2),
+ "--create-surface",
+ "--uniform-snowpack",
+ "--cap-saturation",
+ "--verbose",
+ "--overwrite",
+ ]
+ else:
+ # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT
+ # NOTE: FATES will currently not run with a 78-PFT surface dataset
+ # set crop flag
+ # set dominant pft
+ subset_command = [
+ "./subset_data",
+ "point",
+ "--lat",
+ str(lat),
+ "--lon",
+ str(lon),
+ "--site",
+ clmsite,
+ "--crop",
+ "--dompft",
+ str(pft1),
+ str(pft2),
+ "--pctpft",
+ str(pctpft1),
+ str(pctpft2),
+ "--create-surface",
+ "--uniform-snowpack",
+ "--cap-saturation",
+ "--verbose",
+ "--overwrite",
+ ]
+ execute(subset_command)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/ctsm/site_and_regional/plumber2_usermods.py b/python/ctsm/site_and_regional/plumber2_usermods.py
new file mode 100644
index 0000000000..2fe183fb77
--- /dev/null
+++ b/python/ctsm/site_and_regional/plumber2_usermods.py
@@ -0,0 +1,191 @@
+#! /usr/bin/env python3
+
+"""
+
+Reads in .csv files with PLUMBER2 site information
+Creates individual usermod_dirs for each PLUMBER2 site with shell_commands
+
+"""
+
+# Import libraries
+from __future__ import print_function
+
+import os
+import tqdm
+
+import pandas as pd
+
+
+# Big ugly function to create usermod_dirs for each site
+def write_usermods(
+ lat, lon, site, start_year, end_year, start_date, start_year_actual, start_tod, atm_ncpl, stop_n
+):
+ """
+ Write information to be added to user mods
+ """
+
+ site_dir = os.path.join("../../cime_config/usermods_dirs/PLUMBER2/", site)
+
+ if not os.path.isdir(site_dir):
+ os.makedirs(site_dir, exist_ok=True)
+
+ # create files in each directory
+ include = os.path.join(site_dir, "include_user_mods")
+ i_file = open(include, "w") # or 'a' to add text instead of truncate
+ i_file.write("../defaults")
+ i_file.close()
+
+ # pylint: disable=anomalous-backslash-in-string
+ lai_stream = (
+ "\$DIN_LOC_ROOT/lnd/clm2/lai_streams/PLUMBER2/"
+ + site
+ + "/LAI_stream_"
+ + site
+ + "_"
+ + str(start_year)
+ + "-"
+ + str(end_year)
+ + ".nc"
+ )
+ shell = os.path.join(site_dir, "shell_commands")
+ s_file = open(shell, "w") # or 'a' to add text instead of truncate
+ # pylint: disable=line-too-long
+ s_file.write(
+ # TODO turn on following line after cdeps changes are added
+ #'./xmlchange PLUMBER2SITE='+site + '\n' \
+ "./xmlchange PTS_LON=" + str(lon) + "\n"
+ "./xmlchange PTS_LAT=" + str(lat) + "\n"
+ "./xmlchange DATM_YR_END=" + str(end_year) + "\n"
+ "./xmlchange START_TOD=" + str(start_tod) + "\n"
+ "./xmlchange ATM_NCPL=" + str(atm_ncpl) + "\n"
+ "\n" # TODO, get working for CTSM5.1:
+ # remove the above line as it's redundant after PLUMBER2SITE is added
+ # Alternatively, we can take this out of default/user_nl_clm
+ # since doing it this way works fine TODO for 5.2
+ "echo \"fsurdat='/glade/u/home/wwieder/CTSM/tools/site_and_regional/subset_data_single_point/surfdata_1x1_PLUMBER2_"
+ + site
+ + "_hist_16pfts_Irrig_CMIP6_simyr2000_c231005.nc ' \" >> user_nl_clm \n"
+ 'echo "CLM_USRDAT.PLUMBER2:datafiles= \$DIN_LOC_ROOT/atm/datm7/CLM1PT_data/PLUMBER2/'
+ + site
+ + "/CLM1PT_data/CTSM_DATM_"
+ + site
+ + "_"
+ + str(start_year)
+ + "-"
+ + str(end_year)
+ + '.nc " >> user_nl_datm_streams \n'
+ 'echo "presaero.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ 'echo "presaero.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
+ 'echo "presaero.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ "\n"
+ 'echo "presndep.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ 'echo "presndep.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
+ 'echo "presndep.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ "\n"
+ 'echo "co2tseries.SSP3-7.0:year_first=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ 'echo "co2tseries.SSP3-7.0:year_last=' + str(end_year) + '" >> user_nl_datm_streams \n'
+ 'echo "co2tseries.SSP3-7.0:year_align=' + str(start_year) + '" >> user_nl_datm_streams \n'
+ "\n"
+ "compset=`./xmlquery COMPSET --value` \n"
+ "CLM_USRDAT_NAME=`./xmlquery CLM_USRDAT_NAME --value` \n"
+ "TEST=`./xmlquery TEST --value` \n"
+ "\n"
+ "# For a transient case run the whole length and do not cycle \n"
+ "if [[ $compset =~ ^HIST ]]; then \n"
+ " # Number of years that can be run for the full transient case \n"
+ ' if [[ $TEST != "TRUE" ]]; then \n'
+ " ./xmlchange STOP_N=" + str(stop_n) + "\n"
+ " fi \n"
+ " # set start date for transient case with historical compset \n"
+ " ./xmlchange RUN_STARTDATE=" + str(start_date) + "\n"
+ " ./xmlchange DATM_YR_ALIGN=" + str(start_year_actual) + "\n"
+ " ./xmlchange DATM_YR_START=" + str(start_year_actual) + "\n"
+ "else \n"
+ " # for spinup case with I2000 compset \n"
+ " ./xmlchange RUN_STARTDATE=0001-01-01" + "\n"
+ " ./xmlchange DATM_YR_ALIGN=" + str(1) + "\n"
+ " ./xmlchange DATM_YR_START=" + str(start_year) + "\n"
+ "fi \n"
+ "\n"
+ "# Turn on LAI streams for a SP case \n"
+ "if [[ $compset =~ .*CLM[0-9]+%[^_]*SP.* ]]; then \n"
+ " echo \"stream_fldfilename_lai='" + lai_stream + "'\" >> user_nl_clm \n"
+ ' echo "stream_year_last_lai=' + str(end_year) + '" >> user_nl_clm \n'
+ " if [[ $compset =~ ^HIST ]]; then \n"
+ " # for transient case with a historical compset \n"
+ ' echo "model_year_align_lai=' + str(start_year_actual) + '" >> user_nl_clm \n'
+ ' echo "stream_year_first_lai=' + str(start_year_actual) + '" >> user_nl_clm \n'
+ " else \n"
+ " # for a spinup case with a i2000 compset \n"
+ ' echo "model_year_align_lai=1" >> user_nl_clm \n'
+ ' echo "stream_year_first_lai=' + str(start_year) + '" >> user_nl_clm \n'
+ " fi \n"
+ "fi \n"
+ "\n"
+ )
+ # pylint: enable=line-too-long, anomalous-backslash-in-string
+
+ s_file.close()
+
+ # add baseflow_scalar = 0 to user_nl_clm for wetland sites
+ wetland = [
+ "CZ-wet",
+ "DE-SfN",
+ "FI-Kaa",
+ "FI-Lom",
+ "RU-Che",
+ "SE-Deg",
+ "US-Los",
+ "US-Myb",
+ "US-Tw4",
+ "PL-wet",
+ ]
+ if any(x == site for x in wetland):
+ s_file = open(shell, "a") # or 'a' to add text instead of truncate
+ s_file.write(
+ "\n"
+ "# set baseflow scalar to zero for wetland site \n"
+ 'echo "baseflow_scalar = 0" >> user_nl_clm'
+ )
+ s_file.close()
+
+
+# End write_usermods function
+
+
+def main():
+ """
+ Iterate through plumber2 sites and create usermod_dirs
+ """
+
+ # For now we can just run the 'main' program as a loop
+ plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4)
+
+ for _, row in tqdm.tqdm(plumber2_sites.iterrows()):
+ lat = row["Lat"]
+ lon = row["Lon"]
+ site = row["Site"]
+ start_year = row["start_year"]
+ end_year = row["end_year"]
+ start_date = row["RUN_STARTDATE"]
+ start_year_actual = start_date[:4]
+ start_tod = row["START_TOD"]
+ atm_ncpl = row["ATM_NCPL"]
+ stop_n = 1 + end_year - start_year
+
+ write_usermods(
+ lat,
+ lon,
+ site,
+ start_year,
+ end_year,
+ start_date,
+ start_year_actual,
+ start_tod,
+ atm_ncpl,
+ stop_n,
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/ctsm/test/test_unit_plumber2_surf_wrapper.py b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py
new file mode 100755
index 0000000000..66f5578caa
--- /dev/null
+++ b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+"""
+Unit tests for plumber2_surf_wrapper
+
+You can run this by:
+ python -m unittest test_unit_plumber2_surf_wrapper.py
+"""
+
+import unittest
+import os
+import sys
+
+# -- add python/ctsm to path (needed if we want to run the test stand-alone)
+_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
+sys.path.insert(1, _CTSM_PYTHON)
+
+# pylint: disable=wrong-import-position
+from ctsm import unit_testing
+from ctsm.site_and_regional.plumber2_surf_wrapper import get_parser
+
+# pylint: disable=invalid-name
+
+
+class TestPlumber2SurfWrapper(unittest.TestCase):
+ """
+ Basic class for testing plumber2_surf_wrapper.py.
+ """
+
+ def test_parser(self):
+ """
+ Test that parser has same defaults as expected
+ """
+
+ self.assertEqual(get_parser().argument_default, None, "Parser not working as expected")
+
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
diff --git a/tools/site_and_regional/plumber2_surf_wrapper b/tools/site_and_regional/plumber2_surf_wrapper
new file mode 100755
index 0000000000..b37bc19dc9
--- /dev/null
+++ b/tools/site_and_regional/plumber2_surf_wrapper
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+"""
+This is a just top-level skeleton script that calls
+plumber2_surf_wrapper.py.
+The original code (plumber2_surf_wrapper.py) is located under
+python/ctsm/site_and_regional folder.
+
+For full instructions on how to run the code and different options,
+please check python/ctsm/site_and_regional/plumber2_surf_wrapper.py file.
+
+This script is a simple wrapper for plumber sites that performs the
+following:
+ 1) For plumber sites, subset surface dataset from global dataset
+ 2) Download plumber and update the created surface dataset
+ based on the downloaded plumber data.
+
+----------------------------------------------------------------
+Instructions for running using conda python environments:
+../../py_env_create
+conda activate ctsm_pylib
+"""
+
+import os
+import sys
+
+# -- add python/ctsm to path
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+from ctsm.site_and_regional.plumber2_surf_wrapper import main
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/site_and_regional/plumber2_surf_wrapper.py b/tools/site_and_regional/plumber2_surf_wrapper.py
deleted file mode 100755
index d68875ce6a..0000000000
--- a/tools/site_and_regional/plumber2_surf_wrapper.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#! /usr/bin/env python3
-"""
-|------------------------------------------------------------------|
-|--------------------- Instructions -----------------------------|
-|------------------------------------------------------------------|
-This script is a simple wrapper for neon sites that performs the
-following:
- 1) For neon sites, subset surface dataset from global dataset
- (i.e. ./subset_data.py )
- 2) Download neon and update the created surface dataset
- based on the downloaded neon data.
- (i.e. modify_singlept_site_neon.py)
-
-Instructions for running using conda python environments:
-
-../../py_env_create
-conda activate ctsm_py
-
-"""
-# Import libraries
-from __future__ import print_function
-
-import os
-import sys
-import tqdm
-import logging
-import argparse
-import subprocess
-
-import pandas as pd
-
-
-
-
-def get_parser():
- """
- Get parser object for this script.
- """
- parser = argparse.ArgumentParser(description=__doc__,
- formatter_class=argparse.RawDescriptionHelpFormatter)
-
- parser.print_usage = parser.print_help
-
- parser.add_argument('-v','--verbose',
- help='Verbose mode will print more information. ',
- action="store_true",
- dest="verbose",
- default=False)
-
- parser.add_argument('--16pft',
- help='Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ',
- action="store_true",
- dest="pft_16",
- default=True)
-
- return parser
-
-
-def execute(command):
- """
- Function for running a command on shell.
- Args:
- command (str):
- command that we want to run.
- Raises:
- Error with the return code from shell.
- """
- print ('\n',' >> ',*command,'\n')
-
- try:
- subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT)
-
- except subprocess.CalledProcessError as e:
- #raise RuntimeError("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
- #print (e.ouput)
- print (e)
-
-
-
-
-
-
-def main():
-
- args = get_parser().parse_args()
-
- if args.verbose:
- logging.basicConfig(level=logging.DEBUG)
-
-
- plumber2_sites = pd.read_csv('PLUMBER2_sites.csv', skiprows=4)
-
- for i, row in tqdm.tqdm(plumber2_sites.iterrows()):
- lat = row['Lat']
- lon = row['Lon']
- site = row['Site']
- pft1 = row['pft1']
- pctpft1 = row['pft1-%']
- cth1=row['pft1-cth']
- cbh1=row['pft1-cbh']
- pft2 = row['pft2']
- pctpft2 = row['pft2-%']
- cth2=row['pft2-cth']
- cbh2=row['pft2-cbh']
- # overwrite missing values from .csv file
- if pft1 == -999:
- pft1 = 0
- pctpft1 = 0
- cth1 = 0
- cbh1 = 0
- if pft2 == -999:
- pft2 = 0
- pctpft2 = 0
- cth2 = 0
- cbh2 = 0
- clmsite = "1x1_PLUMBER2_"+site
- print ("Now processing site :", site)
-
- if args.pft_16:
- # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT
- # don't set crop flag
- # set dominant pft
- subset_command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon),
- '--site',clmsite,'--dompft',str(pft1),str(pft2),
- '--pctpft', str(pctpft1),str(pctpft2),
- '--cth', str(cth1),str(cth2),
- '--cbh', str(cbh1),str(cbh2),
- '--create-surface',
- '--uniform-snowpack','--cap-saturation','--verbose','--overwrite']
- else:
- # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT
- # NOTE: FATES will currently not run with a 78-PFT surface dataset
- # set crop flag
- # set dominant pft
- subset_command = ['./subset_data', 'point', '--lat', str(lat), '--lon', str(lon),
- '--site', clmsite,'--crop', '--dompft', str(pft1),str(pft2),
- '--pctpft', str(pctpft1),str(pctpft2), '--create-surface',
- '--uniform-snowpack', '--cap-saturation', '--verbose', '--overwrite']
- execute(subset_command)
-
-if __name__ == "__main__":
- main()
-
diff --git a/tools/site_and_regional/plumber2_usermods b/tools/site_and_regional/plumber2_usermods
new file mode 100755
index 0000000000..d093948336
--- /dev/null
+++ b/tools/site_and_regional/plumber2_usermods
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+"""
+This is a just top-level skeleton script that calls
+plumber2_usermods.py.
+The original code (plumber2_usermods.py) is located under
+python/ctsm/site_and_regional folder.
+
+For full instructions on how to run the code and different options,
+please check python/ctsm/site_and_regional/plumber2_usermods.py file.
+
+This script is a simple wrapper for plumber sites that creates usermod_dirs
+for each site.
+
+----------------------------------------------------------------
+Instructions for running using conda python environments:
+../../py_env_create
+conda activate ctsm_pylib
+"""
+
+import os
+import sys
+
+# -- add python/ctsm to path
+_CTSM_PYTHON = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+from ctsm.site_and_regional.plumber2_usermods import main
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/site_and_regional/plumber2_usermods.py b/tools/site_and_regional/plumber2_usermods.py
deleted file mode 100644
index c12e539c31..0000000000
--- a/tools/site_and_regional/plumber2_usermods.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#! /usr/bin/env python3
-
-"""
-
-Reads in .csv files with PLUMBER2 site information
-Creates individual usermod_dirs for each PLUMBER2 site with shell_commands
-
-"""
-
-# Import libraries
-from __future__ import print_function
-
-import os
-import sys
-import tqdm
-import logging
-import subprocess
-
-import pandas as pd
-
-
-# Big ugly function to create usermod_dirs for each site
-def write_usermods(lat,lon,site,start_year,end_year,
- start_date,start_year_actual,start_tod,atm_ncpl,stop_n):
-
- site_dir = os.path.join('../../cime_config/usermods_dirs/PLUMBER2/',site)
-
- if not os.path.isdir(site_dir):
- os.makedirs(site_dir, exist_ok=True)
-
- # create files in each directory
- include = os.path.join(site_dir,'include_user_mods')
- iFile = open(include, 'w') # or 'a' to add text instead of truncate
- iFile.write('../defaults')
- iFile.close()
-
- LAIstream = '\$DIN_LOC_ROOT/lnd/clm2/lai_streams/PLUMBER2/'+site+'/LAI_stream_'+site+'_'+ \
- str(start_year)+'-'+str(end_year)+'.nc'
- shell = os.path.join(site_dir,'shell_commands')
- sFile = open(shell, 'w') # or 'a' to add text instead of truncate
- sFile.write(
- #TODO turn on following line after cdeps changes are added
- #'./xmlchange PLUMBER2SITE='+site + '\n' \
- './xmlchange PTS_LON='+str(lon) + '\n' \
- './xmlchange PTS_LAT='+str(lat) + '\n' \
- './xmlchange DATM_YR_END='+str(end_year) + '\n' \
- './xmlchange START_TOD='+str(start_tod) + '\n' \
- './xmlchange ATM_NCPL='+str(atm_ncpl) + '\n' \
- '\n' \
- # TODO, get working for CTSM5.1, remove this line as it's redundant after PLUMBER2SITE is added
- # Alternatively, we can take this out of default/user_nl_clm since doing it this way is works fine TODO for 5.2
- 'echo "fsurdat=\'/glade/u/home/wwieder/CTSM/tools/site_and_regional/subset_data_single_point/surfdata_1x1_PLUMBER2_'+site+'_hist_16pfts_Irrig_CMIP6_simyr2000_c231005.nc \' " >> user_nl_clm \n' \
-
- 'echo "CLM_USRDAT.PLUMBER2:datafiles= \$DIN_LOC_ROOT/atm/datm7/CLM1PT_data/PLUMBER2/'+site+'/CLM1PT_data/CTSM_DATM_'+site+'_'+str(start_year)+'-'+str(end_year)+'.nc " >> user_nl_datm_streams \n' \
-
- 'echo "presaero.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \
- 'echo "presaero.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \
- 'echo "presaero.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \
- '\n' \
-
- 'echo "presndep.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \
- 'echo "presndep.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \
- 'echo "presndep.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \
- '\n' \
-
- 'echo "co2tseries.SSP3-7.0:year_first='+str(start_year) + '" >> user_nl_datm_streams \n' \
- 'echo "co2tseries.SSP3-7.0:year_last='+str(end_year) + '" >> user_nl_datm_streams \n' \
- 'echo "co2tseries.SSP3-7.0:year_align='+str(start_year) + '" >> user_nl_datm_streams \n' \
- '\n' \
-
- 'compset=`./xmlquery COMPSET --value` \n' \
- 'CLM_USRDAT_NAME=`./xmlquery CLM_USRDAT_NAME --value` \n' \
- 'TEST=`./xmlquery TEST --value` \n' \
- '\n' \
-
- '# For a transient case run the whole length and do not cycle \n' \
- 'if [[ $compset =~ ^HIST ]]; then \n' \
- ' # Number of years that can be run for the full transient case \n' \
- ' if [[ $TEST != "TRUE" ]]; then \n' \
- ' ./xmlchange STOP_N='+str(stop_n) + '\n' \
- ' fi \n' \
- ' # set start date for transient case with historical compset \n' \
- ' ./xmlchange RUN_STARTDATE='+str(start_date) + '\n' \
- ' ./xmlchange DATM_YR_ALIGN='+str(start_year_actual) + '\n' \
- ' ./xmlchange DATM_YR_START='+str(start_year_actual) + '\n' \
- 'else \n' \
- ' # for spinup case with I2000 compset \n' \
- ' ./xmlchange RUN_STARTDATE=0001-01-01' + '\n' \
- ' ./xmlchange DATM_YR_ALIGN='+str(1) + '\n' \
- ' ./xmlchange DATM_YR_START='+str(start_year) + '\n' \
- 'fi \n' \
- '\n' \
-
- '# Turn on LAI streams for a SP case \n' \
- 'if [[ $compset =~ .*CLM[0-9]+%[^_]*SP.* ]]; then \n' \
- ' echo "stream_fldfilename_lai=\''+LAIstream+'\'" >> user_nl_clm \n' \
- ' echo "stream_year_last_lai='+str(end_year) + '" >> user_nl_clm \n' \
- ' if [[ $compset =~ ^HIST ]]; then \n' \
- ' # for transient case with a historical compset \n' \
- ' echo "model_year_align_lai='+str(start_year_actual) + '" >> user_nl_clm \n' \
- ' echo "stream_year_first_lai='+str(start_year_actual) + '" >> user_nl_clm \n' \
- ' else \n' \
- ' # for a spinup case with a i2000 compset \n' \
- ' echo "model_year_align_lai=1" >> user_nl_clm \n' \
- ' echo "stream_year_first_lai='+str(start_year) + '" >> user_nl_clm \n' \
- ' fi \n' \
- 'fi \n'
- '\n' \
-
- )
-
- sFile.close()
-
- # add baseflow_scalar = 0 to user_nl_clm for wetland sites
- wetland = ["CZ-wet","DE-SfN","FI-Kaa","FI-Lom","RU-Che", \
- "SE-Deg","US-Los","US-Myb","US-Tw4","PL-wet"]
- if any(x == site for x in wetland):
- sFile = open(shell, 'a') # or 'a' to add text instead of truncate
- sFile.write(
- '\n' \
- '# set baseflow scalar to zero for wetland site \n' \
- 'echo "baseflow_scalar = 0" >> user_nl_clm'
- )
- sFile.close()
-
-# End write_usermods function
-
-def main():
- # For now we can just run the 'main' program as a loop
- plumber2_sites = pd.read_csv('PLUMBER2_sites.csv', skiprows=4)
-
- for i, row in tqdm.tqdm(plumber2_sites.iterrows()):
- lat = row['Lat']
- lon = row['Lon']
- site = row['Site']
- start_year = row['start_year']
- end_year = row['end_year']
- start_date = row['RUN_STARTDATE']
- start_year_actual = start_date[:4]
- start_tod = row['START_TOD']
- atm_ncpl = row['ATM_NCPL']
- stop_n = 1+end_year-start_year
-
- write_usermods(lat,lon,site,start_year,end_year,
- start_date,start_year_actual,start_tod,atm_ncpl,stop_n)
-
-if __name__ == "__main__":
- main()
-