From ce8ef3942e5e3134a68a7f7101be7bb21093fd01 Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Mon, 15 Apr 2024 11:40:35 -0500 Subject: [PATCH 01/40] Add TL319_r05_IcoswISC30E3r5 configuration --- cime_config/config_grids.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml index 909db8f42efe..847cf0e5e691 100755 --- a/cime_config/config_grids.xml +++ b/cime_config/config_grids.xml @@ -519,6 +519,16 @@ ARRM10to60E2r1 + + TL319 + r05 + IcoswISC30E3r5 + r05 + null + null + IcoswISC30E3r5 + + TL319 TL319 From cb21d80faed249c2a4ff96ce01c810c913f91458 Mon Sep 17 00:00:00 2001 From: dqwu Date: Fri, 3 May 2024 15:34:54 -0500 Subject: [PATCH 02/40] Adding optional argument to retrieve ncid from ncd_defvar_bygrid The subroutine ncd_defvar_bygrid internally uses a local varid to invoke ncd_defvar_bynf. Introduce a new argument to optionally return the varid to the caller. --- components/elm/src/main/ncdio_pio.F90.in | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/components/elm/src/main/ncdio_pio.F90.in b/components/elm/src/main/ncdio_pio.F90.in index 51160c3e8cc4..6fbaba0b7108 100644 --- a/components/elm/src/main/ncdio_pio.F90.in +++ b/components/elm/src/main/ncdio_pio.F90.in @@ -1070,7 +1070,7 @@ contains dim1name, dim2name, dim3name, dim4name, dim5name, & long_name, standard_name, units, cell_method, missing_value, fill_value, & imissing_value, ifill_value, switchdim, comment, & - flag_meanings, flag_values, nvalid_range ) + flag_meanings, flag_values, nvalid_range, varid ) ! ! !DESCRIPTION: ! Define a netcdf variable @@ -1097,12 +1097,13 @@ contains logical , intent(in), optional :: switchdim ! true=> permute dim1 and dim2 for output integer , intent(in), optional :: flag_values(:) ! attribute for int integer , intent(in), optional :: nvalid_range(2) ! attribute for int + integer , intent(out), optional :: varid ! returned var id ! ! !LOCAL VARIABLES: integer :: n ! 
indices integer :: ndims ! dimension counter integer :: dimid(5) ! dimension ids - integer :: varid ! variable id + integer :: varid_tmp ! variable id (temporary) integer :: itmp ! temporary character(len=256) :: str ! temporary character(len=*),parameter :: subname='ncd_defvar_bygrid' ! subroutine name @@ -1135,13 +1136,17 @@ contains end do end if - call ncd_defvar_bynf(ncid,varname,xtype,ndims,dimid,varid, & + call ncd_defvar_bynf(ncid,varname,xtype,ndims,dimid,varid_tmp, & long_name=long_name, standard_name=standard_name,units=units, cell_method=cell_method, & missing_value=missing_value, fill_value=fill_value, & imissing_value=imissing_value, ifill_value=ifill_value, & comment=comment, flag_meanings=flag_meanings, & flag_values=flag_values, nvalid_range=nvalid_range ) + if (present(varid)) then + varid = varid_tmp + end if + end subroutine ncd_defvar_bygrid !------------------------------------------------------------------------ From 60344fd6bf72c7f4e3701f6c46d2b31fe7be4741 Mon Sep 17 00:00:00 2001 From: dqwu Date: Fri, 3 May 2024 16:03:06 -0500 Subject: [PATCH 03/40] Simplifying PIO calls for interface restartvar in ELM This change optimizes the restartvar code in ELM with the following improvements: * Utilization of the ncid returned by subroutine ncd_defvar_bygrid, eliminating the need for calling PIO_inq_varid. * Introduction of additional optional arguments in the call to ncd_defvar_bygrid to internally set corresponding attributes. * Elimination of some redundant PIO_put_att calls that are already handled within ncd_defvar_bygrid. --- components/elm/src/utils/restUtilMod.F90.in | 189 ++++++++------------ 1 file changed, 75 insertions(+), 114 deletions(-) diff --git a/components/elm/src/utils/restUtilMod.F90.in b/components/elm/src/utils/restUtilMod.F90.in index 7bb9dd04093c..47e3bc87a76c 100644 --- a/components/elm/src/utils/restUtilMod.F90.in +++ b/components/elm/src/utils/restUtilMod.F90.in @@ -68,7 +68,6 @@ contains ! ! 
Local variables integer :: ivalue - type(var_desc_t) :: vardesc ! local vardesc integer :: status ! return error code integer :: varid integer :: lxtype ! local external type (in case logical variable) @@ -84,10 +83,10 @@ contains end if call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & - long_name=trim(long_name), units=units) - - status = PIO_inq_varid(ncid, trim(varname), vardesc) - varid = vardesc%varid + long_name=trim(long_name), units=units, comment=comment, & + fill_value=fill_value, missing_value=missing_value, & + ifill_value=ifill_value, imissing_value=imissing_value, & + varid=varid) if (trim(interpinic_flag) == 'interp') then status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp) @@ -99,36 +98,25 @@ contains status = PIO_put_att(ncid, varid, 'interpinic_flag_meanings', & "1=nearest neighbor, 2=copy directly, 3=skip") - if (present(comment)) then - call ncd_putatt(ncid, varid, 'comment', trim(comment)) - end if - if (present(units)) then - call ncd_putatt(ncid, varid, 'units', trim(units)) - end if - if (present(fill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) + if (.not. present(fill_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) + end if end if - if (present(missing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) + if (.not. present(missing_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) + end if end if - if (present(ifill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) + if (.not. 
present(ifill_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) + end if end if - if (present(imissing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) - end if - if ( xtype == ncd_log )then - status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) ) - status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" ) - status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) ) + if (.not. present(imissing_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) + end if end if else if (flag == 'read' .or. flag == 'write') then @@ -178,7 +166,6 @@ contains ! ! Local variables integer :: ivalue - type(var_desc_t) :: vardesc ! local vardesc integer :: status ! return error code integer :: varid integer :: lxtype ! local external type (in case logical variable) @@ -195,20 +182,23 @@ contains if (.not. present(dim1name)) then call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & - long_name=trim(long_name), units=units) + long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, & + missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, & + nvalid_range=nvalid_range, varid=varid) else if (.not. 
present(dim2name)) then call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & dim1name=trim(dim1name), & - long_name=trim(long_name), units=units) + long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, & + missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, & + nvalid_range=nvalid_range, varid=varid) else if (present(dim2name)) then call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & dim1name=trim(dim1name), dim2name=trim(dim2name), & - long_name=trim(long_name), units=units) + long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, & + missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, & + nvalid_range=nvalid_range, varid=varid) end if - status = PIO_inq_varid(ncid, trim(varname), vardesc) - varid = vardesc%varid - if (trim(interpinic_flag) == 'interp') then status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp) else if (trim(interpinic_flag) == 'copy') then @@ -219,40 +209,25 @@ contains status = PIO_put_att(ncid, varid, 'interpinic_flag_meanings', & "1=nearest neighbor, 2=copy directly, 3=skip") - if (present(comment)) then - call ncd_putatt(ncid, varid, 'comment', trim(comment)) + if (.not. present(fill_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) + end if end if - if (present(units)) then - call ncd_putatt(ncid, varid, 'units', trim(units)) + if (.not. 
present(missing_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) + end if end if - - if (present(fill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) - end if - if (present(missing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) - end if - if (present(ifill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) + if (.not. present(ifill_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) + end if end if - if (present(imissing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) - end if - if (present(nvalid_range)) then - status = PIO_put_att(ncid,varid,'valid_range', nvalid_range ) - end if - if ( xtype == ncd_log )then - status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) ) - status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" ) - status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) ) + if (.not. present(imissing_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) + end if end if else if (flag == 'read' .or. flag == 'write') then @@ -309,7 +284,6 @@ contains ! ! Local variables integer :: ivalue - type(var_desc_t) :: vardesc ! local vardesc integer :: status ! return error code integer :: varid ! returned var id integer :: lxtype ! 
local external type (in case logical variable) @@ -327,15 +301,16 @@ contains if (switchdim) then call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & dim1name=trim(dim2name), dim2name=trim(dim1name), & - long_name=trim(long_name), units=units) + long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, & + missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, & + nvalid_range=nvalid_range, varid=varid) else call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, & dim1name=trim(dim1name), dim2name=trim(dim2name), & - long_name=trim(long_name), units=units) + long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, & + missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, & + nvalid_range=nvalid_range, varid=varid) end if - status = PIO_inq_varid(ncid, trim(varname), vardesc) - - varid = vardesc%varid if (trim(interpinic_flag) == 'interp') then status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp) @@ -348,49 +323,35 @@ contains "1=>nearest_neighbor 2=>copy 3=>skip") if (switchdim) then - status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag', 1) + status = PIO_put_att(ncid, varid, 'switchdim_flag', 1) else - status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag', 0) + status = PIO_put_att(ncid, varid, 'switchdim_flag', 0) end if - status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_values', (/0,1/)) - status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_is_0', & + status = PIO_put_att(ncid, varid, 'switchdim_flag_values', (/0,1/)) + status = PIO_put_att(ncid, varid, 'switchdim_flag_is_0', & "1st and 2nd dims are same as model representation") - status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_is_1', & + status = PIO_put_att(ncid, varid, 'switchdim_flag_is_1', & "1st and 2nd dims are switched from model representation") - if (present(comment)) then - call ncd_putatt(ncid, varid, 'comment', 
trim(comment)) - end if - if (present(units)) then - call ncd_putatt(ncid, varid, 'units', trim(units)) - end if - if (present(fill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) - end if - if (present(missing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype) - else if (lxtype == ncd_double) then - call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) - end if - if (present(ifill_value)) then - call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) - end if - if (present(imissing_value)) then - call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype) - else if (lxtype == ncd_int) then - call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) - end if - if (present(nvalid_range)) then - status = PIO_put_att(ncid,varid,'valid_range', nvalid_range ) - end if - if ( xtype == ncd_log )then - status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) ) - status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" ) - status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) ) + if (.not. present(fill_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype) + end if + end if + if (.not. present(missing_value)) then + if (lxtype == ncd_double) then + call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype) + end if + end if + if (.not. present(ifill_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype) + end if + end if + if (.not. 
present(imissing_value)) then + if (lxtype == ncd_int) then + call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype) + end if end if else From 0fd52f61db69a6516a171ff7f2d99931c7d88396 Mon Sep 17 00:00:00 2001 From: dqwu Date: Tue, 14 May 2024 10:34:51 -0500 Subject: [PATCH 04/40] Update modules for intel/gnu,impi/mvapich on Bebop Update modules for intel/gnu,impi/mvapich on Bebop to get system software more update-to-date. --- cime_config/machines/config_machines.xml | 49 +++++++++++++++--------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/cime_config/machines/config_machines.xml b/cime_config/machines/config_machines.xml index f13901830891..4381de43234a 100644 --- a/cime_config/machines/config_machines.xml +++ b/cime_config/machines/config_machines.xml @@ -2854,34 +2854,45 @@ anaconda3/5.2.0 - intel/18.0.4-443hhug - intel-mkl/2018.4.274-jwaeshj - hdf5/1.10.5-3mk3uik - netcdf/4.7.0-krelxcz - netcdf-fortran/4.4.5-74lj75q + gcc/7.4.0 + intel/20.0.4-lednsve + intel-mkl/2020.4.304-voqlapk - intel-mpi/2018.4.274-4hmwfl6 - parallel-netcdf/1.11.0-acswzws + intel-mpi/2019.9.304-i42whlw + hdf5/1.10.7-ugvomvt + netcdf-c/4.4.1-blyisdg + netcdf-cxx/4.2-gkqc6fq + netcdf-fortran/4.4.4-eanrh5t + parallel-netcdf/1.11.0-y3nmmej - mvapich2/2.3.1-verbs-omjz3ck - parallel-netcdf/1.11.2-7fy6qz3 + mvapich2/2.3.6-verbs-x4iz7lq + hdf5/1.10.7-igh6foh + netcdf-c/4.4.1-gei7x7w + netcdf-cxx/4.2-db2f5or + netcdf-fortran/4.4.4-b4ldb3a + parallel-netcdf/1.11.0-kj4jsvt - gcc/8.2.0-g7hppkz - intel-mkl/2018.4.274-2amycpi - hdf5/1.8.16-mz7lmxh - netcdf/4.4.1-xkjcghm - netcdf-fortran/4.4.4-mpstomu + gcc/8.2.0-xhxgy33 + intel-mkl/2020.4.304-d6zw4xa - intel-mpi/2018.4.274-ozfo327 - parallel-netcdf/1.11.0-filvnis + intel-mpi/2019.9.304-rxpzd6p + hdf5/1.10.7-oy6d2nm + netcdf-c/4.4.1-fysjgfx + netcdf-cxx/4.2-oaiw2v6 + netcdf-fortran/4.4.4-kxgkaop + parallel-netcdf/1.11.0-fce7akl - mvapich2/2.3-bebop-3xi4hiu - parallel-netcdf/1.11.2-hfn33fd + mvapich2/2.3-bebop-a66r4jf + 
hdf5/1.10.5-ejeshwh + netcdf/4.4.1-ve2zfkw + netcdf-cxx/4.2-2rkopdl + netcdf-fortran/4.4.4-thtylny + parallel-netcdf/1.11.0-kozyofv $CIME_OUTPUT_ROOT/$CASE/run @@ -2896,7 +2907,7 @@ $SHELL{dirname $(dirname $(which pnetcdf_version))} - + $SHELL{which h5dump | xargs dirname | xargs dirname} From a67b671da02a2e1af239fc06dbd86dae5ed0cfbd Mon Sep 17 00:00:00 2001 From: Alice Barthel Date: Tue, 14 May 2024 14:41:21 -0700 Subject: [PATCH 05/40] added under_sea_ice restoring for G-cases by default --- components/mpas-ocean/bld/build-namelist | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/mpas-ocean/bld/build-namelist b/components/mpas-ocean/bld/build-namelist index c8e1c1e3a7a4..d4af8f86d995 100755 --- a/components/mpas-ocean/bld/build-namelist +++ b/components/mpas-ocean/bld/build-namelist @@ -1024,7 +1024,7 @@ if ($OCN_FORCING eq 'datm_forced_restoring') { add_default($nl, 'config_use_surface_salinity_monthly_restoring', 'val'=>".true."); add_default($nl, 'config_salinity_restoring_constant_piston_velocity', 'val'=>"1.585e-6"); add_default($nl, 'config_salinity_restoring_max_difference', 'val'=>"100."); - add_default($nl, 'config_salinity_restoring_under_sea_ice', 'val'=>".false."); + add_default($nl, 'config_salinity_restoring_under_sea_ice', 'val'=>".true."); } else { add_default($nl, 'config_use_activeTracers_surface_restoring'); add_default($nl, 'config_use_surface_salinity_monthly_restoring'); From 6480da5b493b94a12bab67418f8478597e108cc1 Mon Sep 17 00:00:00 2001 From: Alice Barthel Date: Tue, 14 May 2024 14:56:18 -0700 Subject: [PATCH 06/40] modified sss restoring to dt by default --- .../mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml | 2 +- .../mpas-ocean/src/tracer_groups/Registry_activeTracers.xml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml index 
248654ef10f5..98c5b18ce179 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml @@ -660,7 +660,7 @@ .false. .false. .false. -'0000-00-01_00:00:00' +'dt' 1.585e-6 0.5 .false. diff --git a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml index 3b5344391480..a4c032bb4781 100644 --- a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml +++ b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml @@ -31,9 +31,9 @@ description="If true, apply monthly salinity restoring using a uniform piston velocity, defined at run-time by config_salinity_restoring_constant_piston_velocity. When false, salinity piston velocity is specified in the input file by salinityPistonVelocity, which may be spatially variable." possible_values=".true. or .false." /> - Date: Tue, 14 May 2024 15:34:41 -0700 Subject: [PATCH 07/40] modified for under sea ice restoring to be default in all cases --- .../bld/namelist_files/namelist_defaults_mpaso.xml | 2 +- .../src/shared/mpas_ocn_tracer_surface_restoring.F | 10 ++++++---- .../src/tracer_groups/Registry_activeTracers.xml | 4 ++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml index 98c5b18ce179..4590e8c3c8a1 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml @@ -663,7 +663,7 @@ 'dt' 1.585e-6 0.5 -.false. +.true. .false. 
diff --git a/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F b/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F index f47608ef0d9d..cc014b3f2362 100644 --- a/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F +++ b/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F @@ -344,7 +344,8 @@ subroutine ocn_get_surfaceSalinityData( streamManager, & if (config_salinity_restoring_under_sea_ice) then - ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.true. + ! Simulation has landIceMask AND + ! config_salinity_restoring_under_sea_ice=.true. (default) do iCell = 1, nCells if (landIceMask(iCell)==1) then ! Turn off salinity restoring in this cell @@ -362,7 +363,7 @@ subroutine ocn_get_surfaceSalinityData( streamManager, & else ! config_salinity_restoring_under_sea_ice = .false. - ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.false. (default) + ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.false. do iCell = 1, nCells if (landIceMask(iCell)==1) then ! Turn off salinity restoring in this cell @@ -386,7 +387,8 @@ subroutine ocn_get_surfaceSalinityData( streamManager, & if (config_salinity_restoring_under_sea_ice) then - ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.true. + ! Simulation has NO landIceMask AND + ! config_salinity_restoring_under_sea_ice=.true. (default) do iCell = 1, nCells deltaS = surfaceSalinityMonthlyClimatologyValue(iCell) - activeTracers(indexSalinity,1,iCell) if (deltaS > config_salinity_restoring_max_difference) deltaS = config_salinity_restoring_max_difference @@ -398,7 +400,7 @@ subroutine ocn_get_surfaceSalinityData( streamManager, & else ! config_salinity_restoring_under_sea_ice = .false. - ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.false. (default) + ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.false. 
do iCell = 1, nCells deltaS = surfaceSalinityMonthlyClimatologyValue(iCell) - activeTracers(indexSalinity,1,iCell) if (deltaS > config_salinity_restoring_max_difference) deltaS = config_salinity_restoring_max_difference diff --git a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml index a4c032bb4781..916df21b70cc 100644 --- a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml +++ b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml @@ -43,8 +43,8 @@ description="Maximum allowable difference between surface salinity and climatology, in grams salt per kilogram seawater." possible_values="any non-negative number" /> - From c9871408a76eb933fdb0edee4e755ca38a03025e Mon Sep 17 00:00:00 2001 From: Ryan Knox Date: Tue, 2 Apr 2024 10:29:59 -0400 Subject: [PATCH 08/40] Adding hooks for history dimension control in fates --- components/elm/bld/ELMBuildNamelist.pm | 5 +- .../bld/namelist_files/namelist_defaults.xml | 2 +- .../namelist_files/namelist_definition.xml | 13 ++ components/elm/src/main/controlMod.F90 | 6 +- components/elm/src/main/elm_varctl.F90 | 15 +- .../elm/src/main/elmfates_interfaceMod.F90 | 143 +++++++++++++++--- components/elm/src/main/histFileMod.F90 | 2 +- 7 files changed, 156 insertions(+), 30 deletions(-) diff --git a/components/elm/bld/ELMBuildNamelist.pm b/components/elm/bld/ELMBuildNamelist.pm index c6214323525b..250d8666d25a 100755 --- a/components/elm/bld/ELMBuildNamelist.pm +++ b/components/elm/bld/ELMBuildNamelist.pm @@ -798,7 +798,7 @@ sub setup_cmdl_fates_mode { "use_fates_inventory_init", "use_fates_fixed_biogeog", "use_fates_nocomp","use_fates_sp", "fates_inventory_ctrl_filename","use_fates_logging", "use_fates_tree_damage", "use_fates_parteh_mode","use_fates_cohort_age_tracking","use_snicar_ad", "use_fates_luh", - "fluh_timeseries"); + "fluh_timeseries","fates_history_dimlevel"); foreach my $var ( @list ) { if ( 
defined($nl->get_value($var)) ) { $nl_flags->{$var} = $nl->get_value($var); @@ -3300,7 +3300,8 @@ sub setup_logic_fates { add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_luh', 'use_fates'=>$nl_flags->{'use_fates'}); add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fates_paramfile', 'phys'=>$nl_flags->{'phys'}); add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fluh_timeseries', 'phys'=>$nl_flags->{'phys'}); - + add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fates_history_dimlevel','use_fates'=>$nl_flags->{'use_fates'}); + # For FATES SP mode make sure no-competion, and fixed-biogeography are also set # And also check for other settings that can't be trigged on as well my $var = "use_fates_sp"; diff --git a/components/elm/bld/namelist_files/namelist_defaults.xml b/components/elm/bld/namelist_files/namelist_defaults.xml index 8294fdeeb5af..23d05a8f7721 100644 --- a/components/elm/bld/namelist_files/namelist_defaults.xml +++ b/components/elm/bld/namelist_files/namelist_defaults.xml @@ -2191,7 +2191,7 @@ this mask will have smb calculated over the entire global land surface .false. .true. .false. - +2,2 .true. .true. .false. diff --git a/components/elm/bld/namelist_files/namelist_definition.xml b/components/elm/bld/namelist_files/namelist_definition.xml index 0f8b03dfec38..955fddcdb792 100644 --- a/components/elm/bld/namelist_files/namelist_definition.xml +++ b/components/elm/bld/namelist_files/namelist_definition.xml @@ -395,6 +395,19 @@ Full pathname of unified land use harmonization data file. This causes the land- types to vary over time. + +Setting for what types of FATES history to be allocate and +calculated at the dynamics timestep (1st integer) and the +model timestep (2nd integer). 
This must be consistent with +hist_fincl*, ie output variables must not be listed if the +output level is not enabled. +0 = no fates history variables are calculated or allocated +1 = only time x space (3d) fates history variables allowed +2 = multiplexed dimensioned fates history is also allowed +(Only relevant if FATES is on) + + Toggle to turn on if Kennedy et al plant hydraulics model is used. diff --git a/components/elm/src/main/controlMod.F90 b/components/elm/src/main/controlMod.F90 index 3b8c08be31bd..8f763a9ebd4d 100755 --- a/components/elm/src/main/controlMod.F90 +++ b/components/elm/src/main/controlMod.F90 @@ -263,7 +263,8 @@ subroutine control_init( ) fluh_timeseries, & fates_parteh_mode, & fates_seeddisp_cadence, & - use_fates_tree_damage + use_fates_tree_damage, & + fates_history_dimlevel namelist /elm_inparm / use_betr @@ -818,7 +819,8 @@ subroutine control_spmd() call mpi_bcast (fates_parteh_mode, 1, MPI_INTEGER, 0, mpicom, ier) call mpi_bcast (fates_seeddisp_cadence, 1, MPI_INTEGER, 0, mpicom, ier) call mpi_bcast (use_fates_tree_damage, 1, MPI_LOGICAL, 0, mpicom, ier) - + call mpi_bcast (fates_history_dimlevel, 2, MPI_INTEGER, 0, mpicom, ier) + call mpi_bcast (use_betr, 1, MPI_LOGICAL, 0, mpicom, ier) call mpi_bcast (use_lai_streams, 1, MPI_LOGICAL, 0, mpicom, ier) diff --git a/components/elm/src/main/elm_varctl.F90 b/components/elm/src/main/elm_varctl.F90 index d5a61da8fdbf..7fa580b19f28 100644 --- a/components/elm/src/main/elm_varctl.F90 +++ b/components/elm/src/main/elm_varctl.F90 @@ -240,7 +240,20 @@ module elm_varctl integer, public :: fates_seeddisp_cadence = iundef ! 0 => no seed dispersal across gridcells ! 1, 2, 3 => daily, monthly, or yearly seed dispersal - + ! FATES history dimension level + ! fates can produce history at either the daily timescale (dynamics) + ! and the model step timescale. It can also generate output on the extra dimension + ! Performing this output can be expensive, so we allow different history dimension + ! levels. 
+ ! The first index is output at the model timescale + ! The second index is output at the dynamics (daily) timescale + ! 0 - no output + ! 1 - include only column level means (3D) + ! 2 - include output that includes the 4th dimension + + integer, dimension(2), public :: fates_history_dimlevel = (/2,2/) + + !---------------------------------------------------------- ! BeTR switches !---------------------------------------------------------- diff --git a/components/elm/src/main/elmfates_interfaceMod.F90 b/components/elm/src/main/elmfates_interfaceMod.F90 index 9b9fbc6e3903..bb8b8d93dc48 100644 --- a/components/elm/src/main/elmfates_interfaceMod.F90 +++ b/components/elm/src/main/elmfates_interfaceMod.F90 @@ -59,6 +59,7 @@ module ELMFatesInterfaceMod use elm_varctl , only : use_fates_tree_damage use elm_varctl , only : nsrest, nsrBranch use elm_varctl , only : fates_inventory_ctrl_filename + use elm_varctl , only : fates_history_dimlevel use elm_varctl , only : use_lch4 use elm_varctl , only : use_century_decomp use elm_varcon , only : tfrz @@ -137,7 +138,7 @@ module ELMFatesInterfaceMod use FatesHistoryInterfaceMod, only : fates_hist use FatesRestartInterfaceMod, only : fates_restart_interface_type use FatesInterfaceTypesMod, only : hlm_num_luh2_states - + use FatesIOVariableKindMod, only : group_dyna_simple, group_dyna_complx use PRTGenericMod , only : num_elements use FatesPatchMod , only : fates_patch_type use FatesDispersalMod , only : lneighbors, dispersal_type, IsItDispersalTime @@ -284,6 +285,7 @@ module ELMFatesInterfaceMod public :: ELMFatesGlobals1 public :: ELMFatesGlobals2 public :: ELMFatesTimesteps + public :: CrossRefHistoryFields contains @@ -427,6 +429,9 @@ subroutine ELMFatesGlobals2() call set_fates_ctrlparms('parteh_mode',ival=fates_parteh_mode) call set_fates_ctrlparms('seeddisp_cadence',ival=fates_seeddisp_cadence) + call set_fates_ctrlparms('hist_hifrq_dimlevel',ival=fates_history_dimlevel(1)) + call 
set_fates_ctrlparms('hist_dynam_dimlevel',ival=fates_history_dimlevel(2)) + if(use_fates_tree_damage)then pass_tree_damage = 1 else @@ -590,6 +595,84 @@ subroutine ELMFatesGlobals2() return end subroutine ELMFatesGlobals2 + + ! =================================================================================== + + subroutine CrossRefHistoryFields + + ! This routine only needs to be called on the masterproc. + ! Here we cross reference the ELM history master + ! list and make sure that all fields that start + ! with fates have been allocated. If it has + ! not, then we give a more constructive error + ! message than what is possible in PIO. The user + ! most likely needs to increase the history density + ! level + + use histFileMod, only: getname + use histFileMod, only: hist_fincl1,hist_fincl2,hist_fincl3,hist_fincl4 + use histFileMod, only: hist_fincl5,hist_fincl6 + use histFileMod, only: max_tapes, max_flds, max_namlen + + integer :: t ! iterator index for history tapes + integer :: f ! iterator index for registered history field names + integer :: nh ! iterator index for fates registered history + logical :: is_fates_field ! Does this start with FATES_ ? + logical :: found ! if true, than the history field is either + ! not part of the fates set, or was found in + ! the fates set + character(len=64) :: fincl_name + ! This is a copy of the public in histFileMod, copied + ! here because it isn't filled at the time of this call + character(len=max_namlen+2) :: fincl(max_flds,max_tapes) + + fincl(:,1) = hist_fincl1(:) + fincl(:,2) = hist_fincl2(:) + fincl(:,3) = hist_fincl3(:) + fincl(:,4) = hist_fincl4(:) + fincl(:,5) = hist_fincl5(:) + fincl(:,6) = hist_fincl6(:) + + do t = 1,max_tapes + + f = 1 + search_fields: do while (f < max_flds .and. fincl(f,t) /= ' ') + + fincl_name = getname(fincl(f,t)) + is_fates_field = fincl_name(1:6)=='FATES_' + + if(is_fates_field) then + found = .false. 
+ do_fates_hist: do nh = 1,fates_hist%num_history_vars() + if(trim(fates_hist%hvars(nh)%vname) == & + trim(fincl_name)) then + found=.true. + exit do_fates_hist + end if + end do do_fates_hist + + if(.not.found)then + write(iulog,*) 'the history field: ',trim(fincl_name) + write(iulog,*) 'was requested in the namelist, but was' + write(iulog,*) 'not found in the list of fates_hist%hvars.' + write(iulog,*) 'Most likely, this is because this history variable' + write(iulog,*) 'was specified in the user namelist, but the user' + write(iulog,*) 'specified a FATES history output dimension level' + write(iulog,*) 'that does not contain that variable in its valid set.' + write(iulog,*) 'You may have to increase the namelist setting: fates_history_dimlevel' + write(iulog,*) 'current fates_history_dimlevel: ',fates_history_dimlevel(:) + !uncomment if you want to list all fates history variables in registry + !do_fates_hist2: do nh = 1,fates_hist%num_history_vars() + ! write(iulog,*) trim(fates_hist%hvars(nh)%vname) + !end do do_fates_hist2 + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if + end if + f = f + 1 + end do search_fields + + end do + end subroutine CrossRefHistoryFields ! ==================================================================================== @@ -1097,11 +1180,8 @@ subroutine dynamics_driv(this, bounds_clump, top_as_inst, & ! Flush arrays to values defined by %flushval (see registry entry in ! subroutine define_history_vars() ! --------------------------------------------------------------------------------- - call fates_hist%flush_hvars(nc,upfreq_in=1) - - ! Frequency 5 is routine that processes FATES history - ! on the dynamics (daily) step, but before disturbance - call fates_hist%flush_hvars(nc,upfreq_in=5) + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple) + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx) ! --------------------------------------------------------------------------------- ! 
Part II: Call the FATES model now that input boundary conditions have been @@ -1791,14 +1871,22 @@ subroutine restart( this, bounds_proc, ncid, flag, & ! ------------------------------------------------------------------------ ! Update history IO fields that depend on ecosystem dynamics ! ------------------------------------------------------------------------ - call fates_hist%flush_hvars(nc,upfreq_in=1) - call fates_hist%flush_hvars(nc,upfreq_in=5) - do s = 1,this%fates(nc)%nsites - call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & - upfreq_in=1) - call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & - upfreq_in=5) - end do + + if(fates_history_dimlevel(2)>0) then + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple) + do s = 1,this%fates(nc)%nsites + call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & + upfreq_in=group_dyna_simple) + end do + if(fates_history_dimlevel(2)>1) then + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx) + do s = 1,this%fates(nc)%nsites + call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & + upfreq_in=group_dyna_complx) + end do + end if + end if + call fates_hist%update_history_dyn( nc, & this%fates(nc)%nsites, & this%fates(nc)%sites, & @@ -1971,15 +2059,21 @@ subroutine init_coldstart(this, canopystate_inst, soilstate_inst, frictionvel_in ! ------------------------------------------------------------------------ ! Update history IO fields that depend on ecosystem dynamics ! 
------------------------------------------------------------------------ - - call fates_hist%flush_hvars(nc,upfreq_in=1) - call fates_hist%flush_hvars(nc,upfreq_in=5) - do s = 1,this%fates(nc)%nsites - call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & - upfreq_in=1) - call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & - upfreq_in=5) - end do + if(fates_history_dimlevel(2)>0) then + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple) + do s = 1,this%fates(nc)%nsites + call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & + upfreq_in=group_dyna_simple) + end do + if(fates_history_dimlevel(2)>1) then + call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx) + do s = 1,this%fates(nc)%nsites + call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), & + upfreq_in=group_dyna_complx) + end do + end if + end if + call fates_hist%update_history_dyn( nc, & this%fates(nc)%nsites, & this%fates(nc)%sites, & @@ -2762,6 +2856,7 @@ subroutine wrap_update_hifrq_hist(this, bounds_clump ) this%fates(nc)%nsites, & this%fates(nc)%sites, & this%fates(nc)%bc_in, & + this%fates(nc)%bc_out, & dtime) @@ -3068,6 +3163,8 @@ subroutine init_history_io(this,bounds_proc) call fates_hist%initialize_history_vars() nvar = fates_hist%num_history_vars() + call CrossRefHistoryFields() + do ivar = 1, nvar associate( vname => fates_hist%hvars(ivar)%vname, & diff --git a/components/elm/src/main/histFileMod.F90 b/components/elm/src/main/histFileMod.F90 index c11eafe96f45..1b86c3a1618a 100644 --- a/components/elm/src/main/histFileMod.F90 +++ b/components/elm/src/main/histFileMod.F90 @@ -148,7 +148,7 @@ module histFileMod private :: hist_set_snow_field_2d ! Set values in history field dimensioned by levsno private :: list_index ! Find index of field in exclude list private :: set_hist_filename ! Determine history dataset filenames - private :: getname ! Retrieve name portion of input "inname" + public :: getname ! 
Retrieve name portion of input "inname" (PUBLIC for FATES) private :: getflag ! Retrieve flag private :: pointer_index ! Track data pointer indices private :: max_nFields ! The max number of fields on any tape From 93a0c8b8fa20d07983149308bf5439c5e4374256 Mon Sep 17 00:00:00 2001 From: Ryan Knox Date: Wed, 15 May 2024 09:40:37 -0400 Subject: [PATCH 09/40] Updated fates external pointer to sci.1.76.3_api.35.1.0 --- components/elm/src/external_models/fates | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/elm/src/external_models/fates b/components/elm/src/external_models/fates index 42d804ba54d0..f0185f7c7033 160000 --- a/components/elm/src/external_models/fates +++ b/components/elm/src/external_models/fates @@ -1 +1 @@ -Subproject commit 42d804ba54d0cf013a9737018ff9920e0c9808ea +Subproject commit f0185f7c7033fa69c80d1ddb07cbcbf1f8be1adc From 1a5f91dac629fd68bc3cd42b4cc7e9de7c85bbd3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 28 May 2024 05:50:32 -0500 Subject: [PATCH 10/40] Change high-frequency output to `append` mode This prevents the last entry from the previous run before a restart from being clobbered by a new run. 
--- components/mpas-ocean/cime_config/buildnml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/mpas-ocean/cime_config/buildnml b/components/mpas-ocean/cime_config/buildnml index 47076dee726a..f0330621d54b 100755 --- a/components/mpas-ocean/cime_config/buildnml +++ b/components/mpas-ocean/cime_config/buildnml @@ -859,7 +859,7 @@ def buildnml(case, caseroot, compname): lines.append(' filename_interval="00-01-00_00:00:00"') lines.append(' reference_time="01-01-01_00:00:00"') lines.append(' output_interval="00-00-05_00:00:00"') - lines.append(' clobber_mode="truncate"') + lines.append(' clobber_mode="append"') lines.append(' packages="highFrequencyOutputAMPKG">') lines.append('') lines.append(' ') From 7977c372dc2922286a9fee9ccb15c8c7af6b73c9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 11 Dec 2023 21:46:11 -0800 Subject: [PATCH 11/40] Add RRSwISC6to18E3r5 to MPAS-Ocean and -Seaice --- .../namelist_defaults_mpaso.xml | 16 ++++++ components/mpas-ocean/cime_config/buildnml | 49 ++++++++++++++----- .../namelist_defaults_mpassi.xml | 4 ++ components/mpas-seaice/cime_config/buildnml | 24 +++++++-- 4 files changed, 76 insertions(+), 17 deletions(-) diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml index 2880bf2ce5cb..ffd45353943e 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml @@ -54,6 +54,7 @@ '00:04:00' '00:02:00' '00:01:00' +'00:05:00' 'split_explicit_ab2' 2 @@ -83,6 +84,7 @@ .true. .true. .true. +.true. -1.0 .false. 30.0e3 @@ -149,6 +151,7 @@ 4.37e08 5.46e07 6.83e06 +3.2e09 1.0 .false. 0.0 @@ -165,6 +168,7 @@ .false. .false. .false. +.false. 'constant' 400.0 400.0 @@ -203,6 +207,7 @@ .false. .false. .false. +.false. 
'EdenGreatbatch' 'constant' 'constant' @@ -399,6 +404,7 @@ 'pressure_only' 'pressure_only' 'pressure_only' +'pressure_only' 'Jenkins' .false. 10.0 @@ -417,6 +423,7 @@ 4.48e-3 4.48e-3 4.48e-3 +4.48e-3 1e-4 5e-2 0.011 @@ -430,6 +437,7 @@ 0.00295 0.00295 0.00295 +0.00295 3.1e-4 8.42e-5 8.42e-5 @@ -441,6 +449,7 @@ 8.42e-5 8.42e-5 8.42e-5 +8.42e-5 'flux-form' @@ -469,6 +478,7 @@ 4.48e-3 4.48e-3 4.48e-3 +4.48e-3 1.0e-3 10.0 2.5e-3 @@ -555,6 +565,7 @@ '0000_00:00:05' '0000_00:00:02.5' '0000_00:00:01.25' +'0000_00:00:10' 2 .true. 2 @@ -601,6 +612,7 @@ .false. .false. .false. +.false. .false. .false. .false. @@ -1126,6 +1138,7 @@ .true. .true. .true. +.true. '0000-00-00_01:00:00' 'mocStreamfunctionOutput' .true. @@ -1213,6 +1226,7 @@ .true. .true. .true. +.true. 'dt' 'conservationCheckOutput' .false. @@ -1224,6 +1238,7 @@ .true. .true. .true. +.true. .false. .true. .true. @@ -1233,6 +1248,7 @@ .true. .true. .true. +.true. .true. 'conservationCheckRestart' diff --git a/components/mpas-ocean/cime_config/buildnml b/components/mpas-ocean/cime_config/buildnml index 47076dee726a..06868179668f 100755 --- a/components/mpas-ocean/cime_config/buildnml +++ b/components/mpas-ocean/cime_config/buildnml @@ -365,6 +365,20 @@ def buildnml(case, caseroot, compname): ic_date = '20240314' ic_prefix = 'mpaso.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis' + elif ocn_grid == 'RRSwISC6to18E3r5': + decomp_date = '20240327' + decomp_prefix = 'partitions/mpas-o.graph.info.' 
+ restoring_file = 'sss.PHC2_monthlyClimatology.RRSwISC6to18E3r5.20240327.nc' + analysis_mask_file = 'RRSwISC6to18E3r5_mocBasinsAndTransects20210623.nc' + ic_date = '20240327' + ic_prefix = 'mpaso.RRSwISC6to18E3r5' + if ocn_ic_mode == 'spunup': + logger.warning("WARNING: The specified compset is requesting ocean ICs spunup from a G-case") + logger.warning(" But no file available for this grid.") + if ocn_ismf == 'data': + data_ismf_file = 'prescribed_ismf_paolo2023.RRSwISC6to18E3r5.20240327.nc' + + #-------------------------------------------------------------------- # Set OCN_FORCING = datm_forced_restoring if restoring file is available #-------------------------------------------------------------------- @@ -491,7 +505,8 @@ def buildnml(case, caseroot, compname): lines.append('') lines.append('') lines.append('') lines.append('') lines.append(' ') lines.append(' ') - if not ocn_grid.startswith("oRRS1"): + if not (ocn_grid.startswith("oRRS1") or ocn_grid.startswith("RRSwISC6")): lines.append(' ') lines.append(' ') lines.append(' ') @@ -1438,7 +1458,8 @@ def buildnml(case, caseroot, compname): lines.append('240.0 120.0 60.0 +900.0 'noleap' '2000-01-01_00:00:00' 'none' @@ -89,6 +90,7 @@ 75.0 85.0 85.0 +85.0 -60.0 -75.0 -75.0 @@ -103,6 +105,7 @@ -85.0 -85.0 -85.0 +-85.0 'uniform' 0.0 0.0 @@ -166,6 +169,7 @@ 1 1 1 +2 true true 120 diff --git a/components/mpas-seaice/cime_config/buildnml b/components/mpas-seaice/cime_config/buildnml index 8337fcfc7061..b8bdf632a1ec 100755 --- a/components/mpas-seaice/cime_config/buildnml +++ b/components/mpas-seaice/cime_config/buildnml @@ -318,6 +318,16 @@ def buildnml(case, caseroot, compname): grid_date = '20240314' grid_prefix = 'mpassi.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis' + elif ice_grid == 'RRSwISC6to18E3r5': + decomp_date = '20240327' + decomp_prefix = 'partitions/mpas-seaice.graph.info.' 
+ grid_date = '20240327' + grid_prefix = 'mpassi.RRSwISC6to18E3r5' + data_iceberg_file = 'Iceberg_Climatology_Merino.RRSwISC6to18E3r5.20240327.nc' + if ice_ic_mode == 'spunup': + logger.warning("WARNING: The specified compset is requesting seaice ICs spunup from a G-case") + logger.warning(" But no file available for this grid.") + elif ice_grid == 'ICOS10': grid_date = '211015' grid_prefix = 'seaice.ICOS10' @@ -451,7 +461,8 @@ def buildnml(case, caseroot, compname): lines.append('') lines.append(' Date: Mon, 11 Dec 2023 21:55:58 -0800 Subject: [PATCH 12/40] Add RRSwISC6to18E3r5 to ELM --- components/elm/bld/namelist_files/namelist_definition.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/elm/bld/namelist_files/namelist_definition.xml b/components/elm/bld/namelist_files/namelist_definition.xml index 0f8b03dfec38..55206b5de6b4 100644 --- a/components/elm/bld/namelist_files/namelist_definition.xml +++ b/components/elm/bld/namelist_files/namelist_definition.xml @@ -1423,7 +1423,7 @@ Representative concentration pathway for future scenarios [radiative forcing at + valid_values="USGS,gx3v7,gx1v6,navy,test,tx0.1v2,tx1v1,T62,TL319,cruncep,oEC60to30v3,oEC60to30v3wLI,ECwISC30to60E1r2,EC30to60E2r2,WC14to60E2r3,WCAtl12to45E2r4,SOwISC12to60E2r4,ECwISC30to60E2r1,oRRS18to6,oRRS18to6v3,oRRS15to5,oARRM60to10,oARRM60to6,ARRM10to60E2r1,oQU480,oQU240,oQU240wLI,oQU120,oRRS30to10v3,oRRS30to10v3wLI,360x720cru,NLDASww3a,NLDAS,tx0.1v2,ICOS10,IcoswISC30E3r5,IcosXISC30E3r7,RRSwISC6to18E3r5"> Land mask description From d7d700de080d63ce590b5f2c36fe8f9e1459dddc Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 5 Apr 2024 13:57:28 -0500 Subject: [PATCH 13/40] Update the partition files for ocean and ice The new ones use a 64-bit build of gpmetis that does not leave partitions with zero cells. 
--- components/mpas-ocean/cime_config/buildnml | 2 +- components/mpas-seaice/cime_config/buildnml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/components/mpas-ocean/cime_config/buildnml b/components/mpas-ocean/cime_config/buildnml index 06868179668f..99c028a8808b 100755 --- a/components/mpas-ocean/cime_config/buildnml +++ b/components/mpas-ocean/cime_config/buildnml @@ -366,7 +366,7 @@ def buildnml(case, caseroot, compname): ic_prefix = 'mpaso.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis' elif ocn_grid == 'RRSwISC6to18E3r5': - decomp_date = '20240327' + decomp_date = '20240404' decomp_prefix = 'partitions/mpas-o.graph.info.' restoring_file = 'sss.PHC2_monthlyClimatology.RRSwISC6to18E3r5.20240327.nc' analysis_mask_file = 'RRSwISC6to18E3r5_mocBasinsAndTransects20210623.nc' diff --git a/components/mpas-seaice/cime_config/buildnml b/components/mpas-seaice/cime_config/buildnml index b8bdf632a1ec..5522d8ac6511 100755 --- a/components/mpas-seaice/cime_config/buildnml +++ b/components/mpas-seaice/cime_config/buildnml @@ -319,7 +319,7 @@ def buildnml(case, caseroot, compname): grid_prefix = 'mpassi.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis' elif ice_grid == 'RRSwISC6to18E3r5': - decomp_date = '20240327' + decomp_date = '20240404' decomp_prefix = 'partitions/mpas-seaice.graph.info.' 
grid_date = '20240327' grid_prefix = 'mpassi.RRSwISC6to18E3r5' From e3cdc9f5176f6be3a22a7427a745924cbb5f549d Mon Sep 17 00:00:00 2001 From: Gregory Lemieux Date: Tue, 28 May 2024 17:00:36 -0700 Subject: [PATCH 14/40] add namelist build check to disallow fates sp mode with fates hydro --- components/elm/bld/ELMBuildNamelist.pm | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/components/elm/bld/ELMBuildNamelist.pm b/components/elm/bld/ELMBuildNamelist.pm index 250d8666d25a..1500cbecd297 100755 --- a/components/elm/bld/ELMBuildNamelist.pm +++ b/components/elm/bld/ELMBuildNamelist.pm @@ -3317,6 +3317,10 @@ sub setup_logic_fates { if ( $nl->get_value('fates_spitfire_mode') > 0 ) { fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true'); } + # hydro isn't currently supported to work when FATES SP mode is active + if (&value_is_true( $nl->get_value('use_fates_planthydro') )) { + fatal_error('fates sp mode is currently not supported to work with fates hydro'); + } } } # check that fates landuse change mode has the necessary luh2 landuse timeseries data From 60fe431e22c43778991f5f66e419526a30e70924 Mon Sep 17 00:00:00 2001 From: Gregory Lemieux Date: Tue, 28 May 2024 17:14:01 -0700 Subject: [PATCH 15/40] update default fates parameter file --- components/elm/bld/namelist_files/namelist_defaults.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/elm/bld/namelist_files/namelist_defaults.xml b/components/elm/bld/namelist_files/namelist_defaults.xml index 23d05a8f7721..429469df314a 100644 --- a/components/elm/bld/namelist_files/namelist_defaults.xml +++ b/components/elm/bld/namelist_files/namelist_defaults.xml @@ -134,7 +134,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 
-lnd/clm2/paramdata/fates_params_api.32.0.0_12pft_c231215.nc +lnd/clm2/paramdata/fates_params_api.35.0.0_12pft_c240326.nc lnd/clm2/paramdata/CNP_parameters_c131108.nc From bd00c6724ce6149069407ebb6afd41faf2fd6b76 Mon Sep 17 00:00:00 2001 From: Gregory Lemieux Date: Wed, 29 May 2024 14:14:53 -0700 Subject: [PATCH 16/40] update fates allvars testmod with renamed history outputs --- .../elm/fates_cold_allvars/user_nl_elm | 124 ++++++++---------- 1 file changed, 54 insertions(+), 70 deletions(-) diff --git a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm index 2aff9c0b3c23..8ccaf9f5bb39 100644 --- a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm +++ b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm @@ -2,73 +2,57 @@ hist_mfilt = 365 hist_nhtfrq = -24 hist_empty_htapes = .false. fates_spitfire_mode = 1 -hist_fincl1 = 'FATES_CROWNAREA_PF', 'FATES_CANOPYCROWNAREA_PF', -'FATES_NCL_AP', 'FATES_NPATCH_AP', 'FATES_VEGC_AP', -'FATES_SECONDARY_FOREST_FRACTION', 'FATES_WOOD_PRODUCT', -'FATES_SECONDARY_FOREST_VEGC', 'FATES_SECONDAREA_ANTHRODIST_AP', -'FATES_SECONDAREA_DIST_AP', 'FATES_STOMATAL_COND_AP', 'FATES_LBLAYER_COND_AP', -'FATES_NPP_AP', 'FATES_GPP_AP', 'FATES_PARSUN_Z_CLLL', 'FATES_PARSHA_Z_CLLL', -'FATES_PARSUN_Z_CLLLPF', 'FATES_PARSHA_Z_CLLLPF', 'FATES_PARSUN_Z_CL', -'FATES_PARSHA_Z_CL', 'FATES_LAISUN_Z_CLLL', 'FATES_LAISHA_Z_CLLL', -'FATES_LAISUN_Z_CLLLPF', 'FATES_LAISHA_Z_CLLLPF', 'FATES_LAISUN_TOP_CL', -'FATES_LAISHA_TOP_CL', 'FATES_FABD_SUN_CLLLPF', 'FATES_FABD_SHA_CLLLPF', -'FATES_FABI_SUN_CLLLPF', 'FATES_FABI_SHA_CLLLPF', 'FATES_FABD_SUN_CLLL', -'FATES_FABD_SHA_CLLL', 'FATES_FABI_SUN_CLLL', 'FATES_FABI_SHA_CLLL', -'FATES_PARPROF_DIR_CLLLPF', 'FATES_PARPROF_DIF_CLLLPF','FATES_FABD_SUN_TOPLF_CL', -'FATES_FABD_SHA_TOPLF_CL', 'FATES_FABI_SUN_TOPLF_CL', 
'FATES_FABI_SHA_TOPLF_CL', -'FATES_NET_C_UPTAKE_CLLL', 'FATES_CROWNAREA_CLLL', 'FATES_NPLANT_CANOPY_SZAP', -'FATES_NPLANT_USTORY_SZAP', 'FATES_DDBH_CANOPY_SZAP', 'FATES_DDBH_USTORY_SZAP', -'FATES_MORTALITY_CANOPY_SZAP', 'FATES_MORTALITY_USTORY_SZAP', -'FATES_NPLANT_SZAPPF', 'FATES_NPP_APPF', 'FATES_VEGC_APPF', 'FATES_GPP_SZPF', -'FATES_GPP_CANOPY_SZPF', 'FATES_AUTORESP_CANOPY_SZPF', 'FATES_GPP_USTORY_SZPF', -'FATES_AUTORESP_USTORY_SZPF', 'FATES_NPP_SZPF', 'FATES_LEAF_ALLOC_SZPF', -'FATES_SEED_ALLOC_SZPF', 'FATES_FROOT_ALLOC_SZPF', 'FATES_BGSAPWOOD_ALLOC_SZPF', -'FATES_BGSTRUCT_ALLOC_SZPF', 'FATES_AGSAPWOOD_ALLOC_SZPF', -'FATES_AGSTRUCT_ALLOC_SZPF', 'FATES_STORE_ALLOC_SZPF', 'FATES_DDBH_SZPF', -'FATES_GROWTHFLUX_SZPF', 'FATES_GROWTHFLUX_FUSION_SZPF', -'FATES_DDBH_CANOPY_SZPF', 'FATES_DDBH_USTORY_SZPF', 'FATES_BASALAREA_SZPF', -'FATES_VEGC_ABOVEGROUND_SZPF', 'FATES_NPLANT_SZPF', 'FATES_NPLANT_ACPF', -'FATES_MORTALITY_BACKGROUND_SZPF', 'FATES_MORTALITY_HYDRAULIC_SZPF', -'FATES_MORTALITY_CSTARV_SZPF', 'FATES_MORTALITY_IMPACT_SZPF', -'FATES_MORTALITY_FIRE_SZPF', 'FATES_MORTALITY_CROWNSCORCH_SZPF', -'FATES_MORTALITY_CAMBIALBURN_SZPF', 'FATES_MORTALITY_TERMINATION_SZPF', -'FATES_MORTALITY_LOGGING_SZPF', 'FATES_MORTALITY_FREEZING_SZPF', -'FATES_MORTALITY_SENESCENCE_SZPF', 'FATES_MORTALITY_AGESCEN_SZPF', -'FATES_MORTALITY_AGESCEN_ACPF', 'FATES_MORTALITY_CANOPY_SZPF', -'FATES_STOREC_CANOPY_SZPF', 'FATES_LEAFC_CANOPY_SZPF', -'FATES_NPLANT_CANOPY_SZPF', 'FATES_MORTALITY_USTORY_SZPF', -'FATES_STOREC_USTORY_SZPF', 'FATES_LEAFC_USTORY_SZPF', -'FATES_NPLANT_USTORY_SZPF', 'FATES_CWD_ABOVEGROUND_DC', -'FATES_CWD_BELOWGROUND_DC', 'FATES_CWD_ABOVEGROUND_IN_DC', -'FATES_CWD_BELOWGROUND_IN_DC', 'FATES_CWD_ABOVEGROUND_OUT_DC', -'FATES_CWD_BELOWGROUND_OUT_DC', 'FATES_AUTORESP_SZPF', 'FATES_GROWAR_SZPF', -'FATES_MAINTAR_SZPF', 'FATES_RDARK_SZPF', 'FATES_AGSAPMAINTAR_SZPF', -'FATES_BGSAPMAINTAR_SZPF', 'FATES_FROOTMAINTAR_SZPF', -'FATES_YESTCANLEV_CANOPY_SZ', 'FATES_YESTCANLEV_USTORY_SZ', 
-'FATES_VEGC_SZ', 'FATES_DEMOTION_RATE_SZ', 'FATES_PROMOTION_RATE_SZ', -'FATES_SAI_CANOPY_SZ', 'FATES_SAI_USTORY_SZ', 'FATES_NPP_CANOPY_SZ', -'FATES_NPP_USTORY_SZ', 'FATES_TRIMMING_CANOPY_SZ', 'FATES_TRIMMING_USTORY_SZ', -'FATES_CROWNAREA_CANOPY_SZ', 'FATES_CROWNAREA_USTORY_SZ', -'FATES_LEAFCTURN_CANOPY_SZ', 'FATES_FROOTCTURN_CANOPY_SZ', -'FATES_STORECTURN_CANOPY_SZ', 'FATES_STRUCTCTURN_CANOPY_SZ', -'FATES_SAPWOODCTURN_CANOPY_SZ', 'FATES_SEED_PROD_CANOPY_SZ', -'FATES_LEAF_ALLOC_CANOPY_SZ', 'FATES_FROOT_ALLOC_CANOPY_SZ', -'FATES_SAPWOOD_ALLOC_CANOPY_SZ', 'FATES_STRUCT_ALLOC_CANOPY_SZ', -'FATES_SEED_ALLOC_CANOPY_SZ', 'FATES_STORE_ALLOC_CANOPY_SZ', -'FATES_RDARK_CANOPY_SZ', 'FATES_LSTEMMAINTAR_CANOPY_SZ', -'FATES_CROOTMAINTAR_CANOPY_SZ', 'FATES_FROOTMAINTAR_CANOPY_SZ', -'FATES_GROWAR_CANOPY_SZ', 'FATES_MAINTAR_CANOPY_SZ', -'FATES_LEAFCTURN_USTORY_SZ', 'FATES_FROOTCTURN_USTORY_SZ', -'FATES_STORECTURN_USTORY_SZ', 'FATES_STRUCTCTURN_USTORY_SZ', -'FATES_SAPWOODCTURN_USTORY_SZ', 'FATES_SEED_PROD_USTORY_SZ', -'FATES_LEAF_ALLOC_USTORY_SZ', 'FATES_FROOT_ALLOC_USTORY_SZ', -'FATES_SAPWOOD_ALLOC_USTORY_SZ', 'FATES_STRUCT_ALLOC_USTORY_SZ', -'FATES_SEED_ALLOC_USTORY_SZ', 'FATES_STORE_ALLOC_USTORY_SZ', -'FATES_RDARK_USTORY_SZ', 'FATES_LSTEMMAINTAR_USTORY_SZ', -'FATES_CROOTMAINTAR_USTORY_SZ', 'FATES_FROOTMAINTAR_USTORY_SZ', -'FATES_GROWAR_USTORY_SZ', 'FATES_MAINTAR_USTORY_SZ', 'FATES_VEGC_SZPF', -'FATES_LEAFC_SZPF', 'FATES_FROOTC_SZPF', 'FATES_SAPWOODC_SZPF', -'FATES_STOREC_SZPF', 'FATES_REPROC_SZPF', 'FATES_DROUGHT_STATUS_PF', -'FATES_DAYSINCE_DROUGHTLEAFOFF_PF', 'FATES_DAYSINCE_DROUGHTLEAFON_PF', -'FATES_MEANLIQVOL_DROUGHTPHEN_PF', 'FATES_MEANSMP_DROUGHTPHEN_PF', -'FATES_ELONG_FACTOR_PF' +fates_history_dimlevel = 2,2 +use_fates_tree_damage = .true. 
+hist_ndens = 1 +hist_fincl1 = 'FATES_TLONGTERM', +'FATES_TGROWTH','FATES_SEEDS_IN_GRIDCELL_PF','FATES_SEEDS_OUT_GRIDCELL_PF','FATES_NCL_AP', +'FATES_NPATCH_AP','FATES_VEGC_AP','FATES_SECONDAREA_ANTHRODIST_AP','FATES_SECONDAREA_DIST_AP', +'FATES_FUEL_AMOUNT_APFC','FATES_STOREC_TF_USTORY_SZPF','FATES_STOREC_TF_CANOPY_SZPF', +'FATES_CROWNAREA_CLLL','FATES_ABOVEGROUND_MORT_SZPF', +'FATES_ABOVEGROUND_PROD_SZPF','FATES_NPLANT_SZAP','FATES_NPLANT_CANOPY_SZAP', +'FATES_NPLANT_USTORY_SZAP','FATES_DDBH_CANOPY_SZAP','FATES_DDBH_USTORY_SZAP', +'FATES_MORTALITY_CANOPY_SZAP','FATES_MORTALITY_USTORY_SZAP','FATES_NPLANT_SZAPPF', +'FATES_NPP_APPF','FATES_VEGC_APPF','FATES_SCORCH_HEIGHT_APPF','FATES_GPP_SZPF', +'FATES_GPP_CANOPY_SZPF','FATES_AUTORESP_CANOPY_SZPF','FATES_GPP_USTORY_SZPF', +'FATES_AUTORESP_USTORY_SZPF','FATES_NPP_SZPF','FATES_LEAF_ALLOC_SZPF', +'FATES_SEED_ALLOC_SZPF','FATES_FROOT_ALLOC_SZPF','FATES_BGSAPWOOD_ALLOC_SZPF', +'FATES_BGSTRUCT_ALLOC_SZPF','FATES_AGSAPWOOD_ALLOC_SZPF','FATES_AGSTRUCT_ALLOC_SZPF', +'FATES_STORE_ALLOC_SZPF','FATES_DDBH_SZPF','FATES_GROWTHFLUX_SZPF','FATES_GROWTHFLUX_FUSION_SZPF', +'FATES_DDBH_CANOPY_SZPF','FATES_DDBH_USTORY_SZPF','FATES_BASALAREA_SZPF','FATES_VEGC_ABOVEGROUND_SZPF', +'FATES_NPLANT_SZPF','FATES_NPLANT_ACPF','FATES_MORTALITY_BACKGROUND_SZPF','FATES_MORTALITY_HYDRAULIC_SZPF', +'FATES_MORTALITY_CSTARV_SZPF','FATES_MORTALITY_IMPACT_SZPF','FATES_MORTALITY_FIRE_SZPF', +'FATES_MORTALITY_CROWNSCORCH_SZPF','FATES_MORTALITY_CAMBIALBURN_SZPF','FATES_MORTALITY_TERMINATION_SZPF', +'FATES_MORTALITY_LOGGING_SZPF','FATES_MORTALITY_FREEZING_SZPF','FATES_MORTALITY_SENESCENCE_SZPF', +'FATES_MORTALITY_AGESCEN_SZPF','FATES_MORTALITY_AGESCEN_ACPF','FATES_MORTALITY_CANOPY_SZPF', +'FATES_M3_MORTALITY_CANOPY_SZPF','FATES_M3_MORTALITY_USTORY_SZPF','FATES_C13DISC_SZPF', +'FATES_STOREC_CANOPY_SZPF','FATES_LEAFC_CANOPY_SZPF','FATES_LAI_CANOPY_SZPF','FATES_CROWNAREA_CANOPY_SZPF', 
+'FATES_CROWNAREA_USTORY_SZPF','FATES_NPLANT_CANOPY_SZPF','FATES_MORTALITY_USTORY_SZPF','FATES_STOREC_USTORY_SZPF', +'FATES_LEAFC_USTORY_SZPF','FATES_LAI_USTORY_SZPF','FATES_NPLANT_USTORY_SZPF','FATES_CWD_ABOVEGROUND_DC', +'FATES_CWD_BELOWGROUND_DC','FATES_CWD_ABOVEGROUND_IN_DC','FATES_CWD_BELOWGROUND_IN_DC', +'FATES_CWD_ABOVEGROUND_OUT_DC','FATES_CWD_BELOWGROUND_OUT_DC','FATES_YESTCANLEV_CANOPY_SZ', +'FATES_YESTCANLEV_USTORY_SZ','FATES_VEGC_SZ','FATES_DEMOTION_RATE_SZ','FATES_PROMOTION_RATE_SZ', +'FATES_SAI_CANOPY_SZ','FATES_M3_MORTALITY_CANOPY_SZ','FATES_M3_MORTALITY_USTORY_SZ','FATES_SAI_USTORY_SZ', +'FATES_NPP_CANOPY_SZ','FATES_NPP_USTORY_SZ','FATES_TRIMMING_CANOPY_SZ','FATES_TRIMMING_USTORY_SZ', +'FATES_CROWNAREA_CANOPY_SZ','FATES_CROWNAREA_USTORY_SZ','FATES_LEAFCTURN_CANOPY_SZ','FATES_FROOTCTURN_CANOPY_SZ', +'FATES_STORECTURN_CANOPY_SZ','FATES_STRUCTCTURN_CANOPY_SZ','FATES_SAPWOODCTURN_CANOPY_SZ','FATES_SEED_PROD_CANOPY_SZ', +'FATES_LEAF_ALLOC_CANOPY_SZ','FATES_FROOT_ALLOC_CANOPY_SZ','FATES_SAPWOOD_ALLOC_CANOPY_SZ','FATES_STRUCT_ALLOC_CANOPY_SZ', +'FATES_SEED_ALLOC_CANOPY_SZ','FATES_STORE_ALLOC_CANOPY_SZ','FATES_LEAFCTURN_USTORY_SZ','FATES_FROOTCTURN_USTORY_SZ', +'FATES_STORECTURN_USTORY_SZ','FATES_STRUCTCTURN_USTORY_SZ','FATES_SAPWOODCTURN_USTORY_SZ', +'FATES_SEED_PROD_USTORY_SZ','FATES_LEAF_ALLOC_USTORY_SZ','FATES_FROOT_ALLOC_USTORY_SZ','FATES_SAPWOOD_ALLOC_USTORY_SZ', +'FATES_STRUCT_ALLOC_USTORY_SZ','FATES_SEED_ALLOC_USTORY_SZ','FATES_STORE_ALLOC_USTORY_SZ','FATES_CROWNAREA_CANOPY_CD', +'FATES_CROWNAREA_USTORY_CD','FATES_NPLANT_CDPF','FATES_NPLANT_CANOPY_CDPF','FATES_NPLANT_USTORY_CDPF', +'FATES_M3_CDPF','FATES_M11_SZPF','FATES_M11_CDPF','FATES_MORTALITY_CDPF','FATES_M3_MORTALITY_CANOPY_CDPF', +'FATES_M3_MORTALITY_USTORY_CDPF','FATES_M11_MORTALITY_CANOPY_CDPF','FATES_M11_MORTALITY_USTORY_CDPF', +'FATES_MORTALITY_CANOPY_CDPF','FATES_MORTALITY_USTORY_CDPF','FATES_DDBH_CDPF','FATES_DDBH_CANOPY_CDPF', 
+'FATES_DDBH_USTORY_CDPF','FATES_VEGC_SZPF','FATES_LEAFC_SZPF','FATES_FROOTC_SZPF','FATES_SAPWOODC_SZPF',
+'FATES_STOREC_SZPF','FATES_REPROC_SZPF','FATES_NPP_AP','FATES_GPP_AP','FATES_RDARK_USTORY_SZ',
+'FATES_LSTEMMAINTAR_USTORY_SZ','FATES_CROOTMAINTAR_USTORY_SZ','FATES_FROOTMAINTAR_USTORY_SZ','FATES_GROWAR_USTORY_SZ',
+'FATES_MAINTAR_USTORY_SZ','FATES_RDARK_CANOPY_SZ','FATES_CROOTMAINTAR_CANOPY_SZ','FATES_FROOTMAINTAR_CANOPY_SZ',
+'FATES_GROWAR_CANOPY_SZ','FATES_MAINTAR_CANOPY_SZ','FATES_LSTEMMAINTAR_CANOPY_SZ','FATES_AUTORESP_SZPF',
+'FATES_GROWAR_SZPF','FATES_MAINTAR_SZPF','FATES_RDARK_SZPF','FATES_AGSAPMAINTAR_SZPF','FATES_BGSAPMAINTAR_SZPF',
+'FATES_FROOTMAINTAR_SZPF','FATES_PARSUN_CLLL','FATES_PARSHA_CLLL','FATES_PARSUN_CLLLPF','FATES_PARSHA_CLLLPF',
+'FATES_PARSUN_CL','FATES_PARSHA_CL','FATES_LAISUN_CLLL','FATES_LAISHA_CLLL','FATES_LAISUN_CLLLPF',
+'FATES_LAISHA_CLLLPF','FATES_PARPROF_DIR_CLLLPF','FATES_PARPROF_DIF_CLLLPF','FATES_LAISUN_CL','FATES_LAISHA_CL',
+'FATES_PARPROF_DIR_CLLL','FATES_PARPROF_DIF_CLLL','FATES_NET_C_UPTAKE_CLLL','FATES_CROWNFRAC_CLLLPF',
+'FATES_LBLAYER_COND_AP','FATES_STOMATAL_COND_AP'

From 9be6081c736640ab2301e64d315428bf57b40212 Mon Sep 17 00:00:00 2001
From: Gregory Lemieux
Date: Wed, 29 May 2024 14:50:34 -0700
Subject: [PATCH 17/40] change how fates_hist_dimlevel is set

Setting fates_hist_dimlevel with a single line (i.e. = 2,2) fails
during ELMBuildNamelist. Adjust the assignment to accommodate this.
This also removes hist_ndens to avoid issue #1106 --- .../testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm index 8ccaf9f5bb39..249764fb277d 100644 --- a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm +++ b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm @@ -2,9 +2,9 @@ hist_mfilt = 365 hist_nhtfrq = -24 hist_empty_htapes = .false. fates_spitfire_mode = 1 -fates_history_dimlevel = 2,2 +fates_history_dimlevel(1) = 2 +fates_history_dimlevel(2) = 2 use_fates_tree_damage = .true. -hist_ndens = 1 hist_fincl1 = 'FATES_TLONGTERM', 'FATES_TGROWTH','FATES_SEEDS_IN_GRIDCELL_PF','FATES_SEEDS_OUT_GRIDCELL_PF','FATES_NCL_AP', 'FATES_NPATCH_AP','FATES_VEGC_AP','FATES_SECONDAREA_ANTHRODIST_AP','FATES_SECONDAREA_DIST_AP', From 2d05eb647d32a109f713f0183701bd09dcb3f909 Mon Sep 17 00:00:00 2001 From: Gregory Lemieux Date: Tue, 4 Jun 2024 09:46:08 -0700 Subject: [PATCH 18/40] place expanded diagnostic write statement behind debug check --- components/elm/src/main/elmfates_interfaceMod.F90 | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/components/elm/src/main/elmfates_interfaceMod.F90 b/components/elm/src/main/elmfates_interfaceMod.F90 index bb8b8d93dc48..1b9d6c1ba490 100644 --- a/components/elm/src/main/elmfates_interfaceMod.F90 +++ b/components/elm/src/main/elmfates_interfaceMod.F90 @@ -661,10 +661,12 @@ subroutine CrossRefHistoryFields write(iulog,*) 'that does not contain that variable in its valid set.' 
write(iulog,*) 'You may have to increase the namelist setting: fates_history_dimlevel' write(iulog,*) 'current fates_history_dimlevel: ',fates_history_dimlevel(:) - !uncomment if you want to list all fates history variables in registry - !do_fates_hist2: do nh = 1,fates_hist%num_history_vars() - ! write(iulog,*) trim(fates_hist%hvars(nh)%vname) - !end do do_fates_hist2 + if (debug) then + !if you want to list all fates history variables in registry turn on debug + do_fates_hist2: do nh = 1,fates_hist%num_history_vars() + write(iulog,*) trim(fates_hist%hvars(nh)%vname) + end do do_fates_hist2 + end if call endrun(msg=errMsg(sourcefile, __LINE__)) end if end if From a4f20f9747b145fb3777b113ffdedddb0accf07a Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Tue, 4 Jun 2024 14:24:28 -0500 Subject: [PATCH 19/40] Add mapping and domain files for RRSwISC6to18E3r5 mesh --- cime_config/config_grids.xml | 173 +++++++++++++++++++++++++++++++---- 1 file changed, 157 insertions(+), 16 deletions(-) diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml index 7c8eb0ff75bd..05ddf741d27c 100755 --- a/cime_config/config_grids.xml +++ b/cime_config/config_grids.xml @@ -406,6 +406,16 @@ IcoswISC30E3r5 + + T62 + T62 + RRSwISC6to18E3r5 + rx1 + null + null + RRSwISC6to18E3r5 + + TL319 TL319 @@ -616,6 +626,16 @@ IcosXISC30E3r7 + + TL319 + TL319 + RRSwISC6to18E3r5 + JRA025 + null + null + RRSwISC6to18E3r5 + + TL319 TL319 @@ -1340,6 +1360,16 @@ IcoswISC30E3r5 + + ne30np4.pg2 + ne30np4.pg2 + RRSwISC6to18E3r5 + r05 + null + null + RRSwISC6to18E3r5 + + ne0np4_northamericax4v1 r0125 @@ -1623,6 +1653,26 @@ IcoswISC30E3r5 + + ne120np4.pg2 + r05 + RRSwISC6to18E3r5 + r05 + null + null + RRSwISC6to18E3r5 + + + + ne120np4.pg2 + r025 + RRSwISC6to18E3r5 + r025 + null + null + RRSwISC6to18E3r5 + + ne240np4 ne240np4 @@ -2291,6 +2341,16 @@ IcosXISC30E3r7 + + ne30np4.pg2 + r05 + RRSwISC6to18E3r5 + r05 + null + null + RRSwISC6to18E3r5 + + ne30np4.pg2 r05 @@ -2601,6 +2661,7 @@ 
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_SOwISC12to60E2r4.210119.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_ECwISC30to60E2r1.201007.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_IcoswISC30E3r5.231121.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_RRSwISC6to18E3r5.240328.nc T62 is Gaussian grid: @@ -2657,6 +2718,8 @@ $DIN_LOC_ROOT/share/domains/domain.ocn.TL319_IcoswISC30E3r5.231121.nc $DIN_LOC_ROOT/share/domains/domain.lnd.TL319_IcosXISC30E3r7.240326.nc $DIN_LOC_ROOT/share/domains/domain.ocn.TL319_IcosXISC30E3r7.240326.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.TL319_RRSwISC6to18E3r5.240328.nc + $DIN_LOC_ROOT/share/domains/domain.ocn.TL319_RRSwISC6to18E3r5.240328.nc $DIN_LOC_ROOT/share/domains/domain.lnd.TL319_oRRS18to6v3.220124.nc $DIN_LOC_ROOT/share/domains/domain.ocn.TL319_oRRS18to6v3.220124.nc TL319 is JRA lat/lon grid: @@ -2768,6 +2831,8 @@ $DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_IcoswISC30E3r5.231121.nc $DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_IcosXISC30E3r7.240326.nc $DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_IcosXISC30E3r7.240326.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_RRSwISC6to18E3r5.240328.nc + $DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_RRSwISC6to18E3r5.240328.nc $DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_gx1v6.190806.nc $DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_gx1v6.190806.nc ne30np4.pg2 is Spectral Elem 1-deg grid w/ 2x2 FV physics grid per element: @@ -2839,6 +2904,8 @@ $DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_ICOS10.230120.nc $DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_IcoswISC30E3r5.231121.nc $DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_IcoswISC30E3r5.231121.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_RRSwISC6to18E3r5.240328.nc + $DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_RRSwISC6to18E3r5.240328.nc $DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_gx1v6.190819.nc $DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_gx1v6.190819.nc ne120np4 is Spectral Elem 1/4-deg grid w/ 2x2 FV 
physics grid @@ -3077,6 +3144,13 @@ IcosXISC30E3r7 is a MPAS ocean grid generated with the jigsaw/compass process using a dual mesh that is a subdivided icosahedron, resulting in a nearly uniform resolution of 30 km.: + + 4062533 + 1 + $DIN_LOC_ROOT/share/domains/domain.ocn.RRSwISC6to18E3r5.240328.nc + RRSwISC6to18E3r5 is a MPAS ocean grid generated with the jigsaw/compass process using a mesh density function that is roughly proportional to the Rossby radius of deformation, with 18 km gridcells at low and 6 km gridcells at high latitudes. Additionally, it has ocean in ice-shelf cavities: + + @@ -3111,6 +3185,8 @@ $DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcoswISC30E3r5.231121.nc $DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcosXISC30E3r7.240326.nc $DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcosXISC30E3r7.240326.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.r05_RRSwISC6to18E3r5.240328.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.r05_RRSwISC6to18E3r5.240328.nc $DIN_LOC_ROOT/share/domains/domain.lnd.r05_gx1v6.191014.nc r05 is 1/2 degree river routing grid: @@ -3144,6 +3220,8 @@ 1440 720 $DIN_LOC_ROOT/share/domains/domain.lnd.r025_IcoswISC30E3r5.240129.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.r025_RRSwISC6to18E3r5.240328.nc + r025 is 1/4 degree river routing grid: @@ -3601,6 +3679,16 @@ cpl/gridmaps/ne30pg2/map_ne30pg2_to_IcosXISC30E3r7_trfvnp2.20240326.nc + + cpl/gridmaps/ne30pg2/map_ne30pg2_to_RRSwISC6to18E3r5_traave.20240328.nc + cpl/gridmaps/ne30pg2/map_ne30pg2_to_RRSwISC6to18E3r5_trbilin.20240328.nc + cpl/gridmaps/ne30pg2/map_ne30pg2_to_RRSwISC6to18E3r5-nomask_trbilin.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_ne30pg2_traave.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_ne30pg2_traave.20240328.nc + cpl/gridmaps/ne30pg2/map_ne30pg2_to_RRSwISC6to18E3r5_trfvnp2.20240328.nc + cpl/gridmaps/ne30pg2/map_ne30pg2_to_RRSwISC6to18E3r5_trfvnp2.20240328.nc + + cpl/gridmaps/ne30pg3/map_ne30pg3_to_oEC60to30v3_mono.200331.nc 
cpl/gridmaps/ne30pg3/map_ne30pg3_to_oEC60to30v3_bilin.200331.nc @@ -3676,6 +3764,14 @@ cpl/gridmaps/ne30pg4/map_r05_to_ne30pg4_mono.200331.nc + + cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_traave.20240206.nc + cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_trfv2.20240206.nc + cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_esmfbilin.20240206.nc + cpl/gridmaps/ne120pg2/map_r025_to_ne30pg2_traave.20240206.nc + cpl/gridmaps/ne120pg2/map_r025_to_ne30pg2_traave.20240206.nc + + cpl/gridmaps/ne30np4/map_ne30np4_to_r0125_mono.190801.nc cpl/gridmaps/ne30np4/map_ne30np4_to_r0125_mono.190801.nc @@ -3775,22 +3871,6 @@ cpl/gridmaps/ne120pg2/map_ne120pg2_to_r0125_mono.200707.nc - - cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_traave.20240206.nc - cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trfv2.20240206.nc - cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_esmfbilin.20240206.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_traave.20240206.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_traave.20240206.nc - - - - cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_traave.20240206.nc - cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_trfv2.20240206.nc - cpl/gridmaps/ne120pg2/map_ne30pg2_to_r025_esmfbilin.20240206.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne30pg2_traave.20240206.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne30pg2_traave.20240206.nc - - cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_mono.20200702.nc cpl/gridmaps/ne120np4/map_ne120np4_to_oRRS18to6v3_mono.20200702.nc @@ -3839,6 +3919,14 @@ cpl/gridmaps/IcoswISC30E3r5/map_IcoswISC30E3r5_to_ne120pg2_traave.20231121.nc + + cpl/gridmaps/ne120pg2/map_ne120pg2_to_RRSwISC6to18E3r5_traave.20240328.nc + cpl/gridmaps/ne120pg2/map_ne120pg2_to_RRSwISC6to18E3r5_trbilin.20240328.nc + cpl/gridmaps/ne120pg2/map_ne120pg2_to_RRSwISC6to18E3r5-nomask_trbilin.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_ne120pg2_traave.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_ne120pg2_traave.20240328.nc + + 
cpl/gridmaps/ne120pg2/map_ne120pg2_to_r05_mono.200331.nc cpl/gridmaps/ne120pg2/map_ne120pg2_to_r05_bilin.200331.nc @@ -3846,11 +3934,24 @@ cpl/gridmaps/ne120pg2/map_r05_to_ne120pg2_mono.200331.nc + + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_traave.20240328.nc + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trfv2.20240206.nc + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trbilin.20240328.nc + cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_traave.20240328.nc + cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_trbilin.20240328.nc + + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r05_mono.200331.nc cpl/gridmaps/ne120pg2/map_ne120pg2_to_r05_bilin.200331.nc + + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_traave.20240328.nc + cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trbilin.20240328.nc + + cpl/gridmaps/ne240np4/map_ne240np4_to_gx1v6_aave_110428.nc cpl/gridmaps/ne240np4/map_ne240np4_to_gx1v6_aave_110428.nc @@ -4368,6 +4469,14 @@ cpl/gridmaps/IcoswISC30E3r5/map_IcoswISC30E3r5_to_T62_traave.20231121.nc + + cpl/gridmaps/T62/map_T62_to_RRSwISC6to18E3r5_traave.20240328.nc + cpl/gridmaps/T62/map_T62_to_RRSwISC6to18E3r5-nomask_trbilin.20240328.nc + cpl/gridmaps/T62/map_T62_to_RRSwISC6to18E3r5_esmfpatch.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_T62_traave.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_T62_traave.20240328.nc + + cpl/gridmaps/TL319/map_TL319_to_oQU240wLI_traave.20240509.nc cpl/gridmaps/TL319/map_TL319_to_oQU240wLI-nomask_trbilin.20240509.nc @@ -4512,6 +4621,14 @@ cpl/gridmaps/IcosXISC30E3r7/map_IcosXISC30E3r7_to_TL319_traave.20240326.nc + + cpl/gridmaps/TL319/map_TL319_to_RRSwISC6to18E3r5_traave.20240328.nc + cpl/gridmaps/TL319/map_TL319_to_RRSwISC6to18E3r5-nomask_trbilin.20240328.nc + cpl/gridmaps/TL319/map_TL319_to_RRSwISC6to18E3r5_esmfpatch.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_TL319_traave.20240328.nc + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_TL319_traave.20240328.nc + + 
cpl/gridmaps/TL319/map_TL319_to_oRRS18to6v3_aave.220124.nc cpl/gridmaps/TL319/map_TL319_to_oRRS18to6v3_bilin.220124.nc @@ -4892,6 +5009,10 @@ cpl/gridmaps/IcosXISC30E3r7/map_IcosXISC30E3r7_to_r05_traave.20240326.nc + + cpl/gridmaps/RRSwISC6to18E3r5/map_RRSwISC6to18E3r5_to_r05_traave.20240328.nc + + cpl/cpl6/map_EC30to60E2r2_to_r05_neareststod.220728.nc @@ -5002,6 +5123,11 @@ cpl/cpl6/map_rx1_to_IcoswISC30E3r5_cstmnn.r150e300.20231121.nc + + cpl/cpl6/map_rx1_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_rx1_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + + cpl/cpl6/map_JRA025_to_oQU240wLI_cstmnn.r150e300.20240516.nc cpl/cpl6/map_JRA025_to_oQU240wLI_cstmnn.r150e300.20240516.nc @@ -5092,6 +5218,11 @@ cpl/cpl6/map_JRA025_to_IcosXISC30E3r7_cstmnn.r150e300.20240326.nc + + cpl/cpl6/map_JRA025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_JRA025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + + cpl/cpl6/map_JRA025_to_oRRS18to6v3_smoothed.r50e100.220124.nc cpl/cpl6/map_JRA025_to_oRRS18to6v3_smoothed.r50e100.220124.nc @@ -5182,11 +5313,21 @@ cpl/cpl6/map_r05_to_IcosXISC30E3r7_cstmnn.r150e300.20240326.nc + + cpl/cpl6/map_r05_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_r05_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + + cpl/cpl6/map_r025_to_IcoswISC30E3r5_cstmnn.r150e300.20240401.nc cpl/cpl6/map_r025_to_IcoswISC30E3r5_cstmnn.r150e300.20240401.nc + + cpl/cpl6/map_r025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_r025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + + cpl/cpl6/map_r0125_to_WC14to60E2r3_smoothed.r150e300.200929.nc cpl/cpl6/map_r0125_to_WC14to60E2r3_smoothed.r150e300.200929.nc From 565a93ead6085cf8a70a5efec68617bea2cff56f Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Tue, 4 Jun 2024 14:59:09 -0500 Subject: [PATCH 20/40] Update mapping and domain files created with an incorrect T62 scrip file --- cime_config/config_grids.xml | 108 +++++++++++++++++------------------ 1 file changed, 54 
insertions(+), 54 deletions(-) diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml index 7c8eb0ff75bd..714727ad9289 100755 --- a/cime_config/config_grids.xml +++ b/cime_config/config_grids.xml @@ -2581,16 +2581,16 @@ 96 $DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx1v6.090320.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx3v7.090911.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.151209.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.151209.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI_mask.160929.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.151209.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.161222.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI_mask.170328.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI_mask.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_ECwISC30to60E1r2.200410.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3.171129.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3wLI_mask.171109.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6v3.170111.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3wLI.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6v3.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS15to5.150722.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to10.180716.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to6.180803.nc @@ -4209,51 +4209,51 @@ - cpl/gridmaps/T62/map_T62_TO_oQU480_aave.151209.nc - cpl/gridmaps/T62/map_T62_TO_oQU480_patc.151209.nc - 
cpl/gridmaps/T62/map_T62_TO_oQU480_blin.151209.nc - cpl/gridmaps/oQU480/map_oQU480_TO_T62_aave.151209.nc - cpl/gridmaps/oQU480/map_oQU480_TO_T62_aave.151209.nc + cpl/gridmaps/T62/map_T62_to_oQU480_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU480_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU480_esmfpatch.20240513.nc + cpl/gridmaps/oQU480/map_oQU480_to_T62_traave.20240513.nc + cpl/gridmaps/oQU480/map_oQU480_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oQU240_aave.151209.nc - cpl/gridmaps/T62/map_T62_TO_oQU240_patc.151209.nc - cpl/gridmaps/T62/map_T62_TO_oQU240_blin.151209.nc - cpl/gridmaps/oQU240/map_oQU240_TO_T62_aave.151209.nc - cpl/gridmaps/oQU240/map_oQU240_TO_T62_aave.151209.nc + cpl/gridmaps/T62/map_T62_to_oQU240_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU240_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU240_esmfpatch.20240513.nc + cpl/gridmaps/oQU240/map_oQU240_to_T62_traave.20240513.nc + cpl/gridmaps/oQU240/map_oQU240_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oQU240wLI_mask_aave.160929.nc - cpl/gridmaps/T62/map_T62_TO_oQU240wLI_nomask_aave.160929.nc - cpl/gridmaps/T62/map_T62_TO_oQU240wLI_mask_patc.160929.nc - cpl/gridmaps/oQU240wLI/map_oQU240wLI_mask_TO_T62_aave.160929.nc - cpl/gridmaps/oQU240wLI/map_oQU240wLI_mask_TO_T62_aave.160929.nc + cpl/gridmaps/T62/map_T62_to_oQU240wLI_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU240wLI-nomask_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU240wLI_esmfpatch.20240513.nc + cpl/gridmaps/oQU240wLI/map_oQU240wLI_to_T62_traave.20240513.nc + cpl/gridmaps/oQU240wLI/map_oQU240wLI_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oQU120_aave.151209.nc - cpl/gridmaps/T62/map_T62_TO_oQU120_patc.151209.nc - cpl/gridmaps/T62/map_T62_TO_oQU120_blin.151209.nc - cpl/gridmaps/oQU120/map_oQU120_TO_T62_aave.151209.nc - cpl/gridmaps/oQU120/map_oQU120_TO_T62_aave.151209.nc + cpl/gridmaps/T62/map_T62_to_oQU120_traave.20240513.nc + 
cpl/gridmaps/T62/map_T62_to_oQU120_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oQU120_esmfpatch.20240513.nc + cpl/gridmaps/oQU120/map_oQU120_to_T62_traave.20240513.nc + cpl/gridmaps/oQU120/map_oQU120_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_aave.161222.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_blin.161222.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3_patc.161222.nc - cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_TO_T62_aave.161222.nc - cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_TO_T62_aave.161222.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3_esmfpatch.20240513.nc + cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_T62_traave.20240513.nc + cpl/gridmaps/oEC60to30v3/map_oEC60to30v3_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_mask_aave.170328.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_nomask_blin.170328.nc - cpl/gridmaps/T62/map_T62_TO_oEC60to30v3wLI_mask_patc.170328.nc - cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_mask_TO_T62_aave.170328.nc - cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_mask_TO_T62_aave.170328.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3wLI_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3wLI-nomask_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oEC60to30v3wLI_esmfpatch.20240513.nc + cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_to_T62_traave.20240513.nc + cpl/gridmaps/oEC60to30v3wLI/map_oEC60to30v3wLI_to_T62_traave.20240513.nc @@ -4265,27 +4265,27 @@ - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_aave.171128.nc - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_blin.171128.nc - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3_patc.171128.nc - cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_TO_T62_aave.171128.nc - cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_TO_T62_aave.171128.nc + cpl/gridmaps/T62/map_T62_to_oRRS30to10v3_traave.20240513.nc + 
cpl/gridmaps/T62/map_T62_to_oRRS30to10v3_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oRRS30to10v3_esmfpatch.20240513.nc + cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_to_T62_traave.20240513.nc + cpl/gridmaps/oRRS30to10v3/map_oRRS30to10v3_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_mask_aave.171109.nc - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_nomask_blin.171109.nc - cpl/gridmaps/T62/map_T62_TO_oRRS30to10v3wLI_mask_patc.171109.nc - cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_TO_T62_aave.171109.nc - cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_mask_TO_T62_aave.171109.nc + cpl/gridmaps/T62/map_T62_to_oRRS30to10v3wLI_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oRRS30to10v3wLI-nomask_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oRRS30to10v3wLI_esmfpatch.20240513.nc + cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_to_T62_traave.20240513.nc + cpl/gridmaps/oRRS30to10v3wLI/map_oRRS30to10v3wLI_to_T62_traave.20240513.nc - cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_aave.170111.nc - cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_patc.170111.nc - cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_blin.170111.nc - cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_aave.170111.nc - cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_aave.170111.nc + cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_traave.20240513.nc + cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_trbilin.20240513.nc + cpl/gridmaps/T62/map_T62_to_oRRS18to6v3_esmfpatch.20240513.nc + cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_traave.20240513.nc + cpl/gridmaps/oRRS18to6v3/map_oRRS18to6v3_to_T62_traave.20240513.nc From b103205b40933fcb7b19e74be4d5cd9a5ed82c3e Mon Sep 17 00:00:00 2001 From: Youngsung Kim Date: Sun, 2 Jun 2024 10:27:12 -0400 Subject: [PATCH 21/40] Matches Nvidia GPU compute capability (CC) version to cc80 on Perlmutter-GPU. 
--- cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake index 59758d59989c..852483663a09 100644 --- a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake +++ b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake @@ -6,8 +6,8 @@ if (COMP_NAME STREQUAL gptl) endif() string(APPEND CPPDEFS " -DTHRUST_IGNORE_CUB_VERSION_CHECK") string(APPEND CMAKE_CUDA_FLAGS " -ccbin CC -O2 -arch sm_80 --use_fast_math") -string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel") +string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc80 -Minfo=accel") set(SCC "cc") set(SCXX "CC") set(SFC "ftn") -string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel") +string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel") From cdadd6f8d0ae827b29f7f5e094c7ef94ec44af4f Mon Sep 17 00:00:00 2001 From: Gregory Lemieux Date: Tue, 11 Jun 2024 08:10:30 -0700 Subject: [PATCH 22/40] update fates tag to sci.1.76.4_api.35.1.0 --- components/elm/src/external_models/fates | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/elm/src/external_models/fates b/components/elm/src/external_models/fates index f0185f7c7033..b8e4eee5ed46 160000 --- a/components/elm/src/external_models/fates +++ b/components/elm/src/external_models/fates @@ -1 +1 @@ -Subproject commit f0185f7c7033fa69c80d1ddb07cbcbf1f8be1adc +Subproject commit b8e4eee5ed46daf5c9e710e9ebbe6d20464adbc8 From 227228606a61f62803bd42d64fb3ec6058c8ad00 Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Wed, 12 Jun 2024 10:13:35 -0500 Subject: [PATCH 23/40] Fix domain filename to not include _mask --- cime_config/config_grids.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml index 714727ad9289..41f352f764a7 100755 --- 
a/cime_config/config_grids.xml +++ b/cime_config/config_grids.xml @@ -2583,7 +2583,7 @@ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx3v7.090911.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.240513.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI_mask.240513.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.240513.nc $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI.240513.nc From 77843bac8897d1b96adf96070c63aa370d565744 Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Mon, 8 Apr 2024 16:29:30 -0500 Subject: [PATCH 24/40] add data rof for moab driver use a data stream file, instead of rof_mesh file --- .../data_comps/drof/src/drof_comp_mod.F90 | 185 +++++++++++++++++- .../data_comps/drof/src/rof_comp_mct.F90 | 38 +++- driver-moab/main/cplcomp_exchange_mod.F90 | 14 +- driver-moab/shr/seq_comm_mct.F90 | 1 + driver-moab/shr/seq_infodata_mod.F90 | 17 +- 5 files changed, 242 insertions(+), 13 deletions(-) diff --git a/components/data_comps/drof/src/drof_comp_mod.F90 b/components/data_comps/drof/src/drof_comp_mod.F90 index cb060e02ff84..3be703ec6998 100644 --- a/components/data_comps/drof/src/drof_comp_mod.F90 +++ b/components/data_comps/drof/src/drof_comp_mod.F90 @@ -25,7 +25,11 @@ module drof_comp_mod use drof_shr_mod , only: rest_file ! namelist input use drof_shr_mod , only: rest_file_strm ! namelist input use drof_shr_mod , only: nullstr - +#ifdef HAVE_MOAB + use seq_comm_mct, only : mrofid ! id of moab rof app + use seq_comm_mct, only : mbrof_data ! turn on if the data rof + use iso_c_binding +#endif ! ! 
!PUBLIC TYPES: implicit none @@ -67,6 +71,12 @@ subroutine drof_comp_init(Eclock, x2r, r2x, & SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, & inst_suffix, inst_name, logunit, read_restart) +#ifdef HAVE_MOAB + use iMOAB, only: iMOAB_DefineTagStorage, iMOAB_GetDoubleTagStorage, & + iMOAB_SetIntTagStorage, iMOAB_SetDoubleTagStorage, & + iMOAB_ResolveSharedEntities, iMOAB_CreateVertices, & + iMOAB_GetMeshInfo, iMOAB_UpdateMeshInfo, iMOAB_WriteMesh +#endif ! !DESCRIPTION: initialize drof model implicit none @@ -92,7 +102,19 @@ subroutine drof_comp_init(Eclock, x2r, r2x, & logical :: exists ! file existance logical integer(IN) :: nu ! unit number character(CL) :: calendar ! model calendar - +#ifdef HAVE_MOAB + character*400 tagname + real(R8) latv, lonv + integer iv, tagindex, ilat, ilon, ierr !, arrsize, nfields + real(R8), allocatable, target :: data(:) + integer(IN), pointer :: idata(:) ! temporary + real(r8), dimension(:), allocatable :: moab_vert_coords ! temporary + !real(R8), allocatable, target :: vtags_zero(:, :) + +#ifdef MOABDEBUG + character*100 outfile, wopts +#endif +#endif !--- formats --- character(*), parameter :: F00 = "('(drof_comp_init) ',8a)" character(*), parameter :: F0L = "('(drof_comp_init) ',a, l2)" @@ -164,6 +186,121 @@ subroutine drof_comp_init(Eclock, x2r, r2x, & call t_stopf('drof_initmctdom') + +#ifdef HAVE_MOAB + ilat = mct_aVect_indexRA(ggrid%data,'lat') + ilon = mct_aVect_indexRA(ggrid%data,'lon') + allocate(moab_vert_coords(lsize*3)) + do iv = 1, lsize + lonv = ggrid%data%rAttr(ilon, iv) * SHR_CONST_PI/180. + latv = ggrid%data%rAttr(ilat, iv) * SHR_CONST_PI/180. + moab_vert_coords(3*iv-2)=COS(latv)*COS(lonv) + moab_vert_coords(3*iv-1)=COS(latv)*SIN(lonv) + moab_vert_coords(3*iv )=SIN(latv) + enddo + + ! create the vertices with coordinates from MCT domain + ierr = iMOAB_CreateVertices(mrofid, lsize*3, 3, moab_vert_coords) + if (ierr .ne. 
0) & + call shr_sys_abort('Error: fail to create MOAB vertices in land model') + + tagname='GLOBAL_ID'//C_NULL_CHAR + ierr = iMOAB_DefineTagStorage(mrofid, tagname, & + 0, & ! dense, integer + 1, & ! number of components + tagindex ) + if (ierr .ne. 0) & + call shr_sys_abort('Error: fail to retrieve GLOBAL_ID tag ') + + ! get list of global IDs for Dofs + call mct_gsMap_orderedPoints(gsMap, my_task, idata) + + ierr = iMOAB_SetIntTagStorage ( mrofid, tagname, lsize, & + 0, & ! vertex type + idata) + if (ierr .ne. 0) & + call shr_sys_abort('Error: fail to set GLOBAL_ID tag ') + + ierr = iMOAB_ResolveSharedEntities( mrofid, lsize, idata ); + if (ierr .ne. 0) & + call shr_sys_abort('Error: fail to resolve shared entities') + + deallocate(moab_vert_coords) + deallocate(idata) + + ierr = iMOAB_UpdateMeshInfo( mrofid ) + if (ierr .ne. 0) & + call shr_sys_abort('Error: fail to update mesh info ') + + allocate(data(lsize)) + ierr = iMOAB_DefineTagStorage( mrofid, "area:aream:frac:mask"//C_NULL_CHAR, & + 1, & ! dense, double + 1, & ! number of components + tagindex ) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to create tag: area:aream:frac:mask' ) + + data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'area'),:) + tagname='area'//C_NULL_CHAR + ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, & + 0, & ! set data on vertices + data) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to get area tag ') + + ! set the same data for aream (model area) as area + ! data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'aream'),:) + tagname='aream'//C_NULL_CHAR + ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, & + 0, & ! set data on vertices + data) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to set aream tag ') + + data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'mask'),:) + tagname='mask'//C_NULL_CHAR + ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, & + 0, & ! 
set data on vertices + data) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to set mask tag ') + + data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'frac'),:) + tagname='frac'//C_NULL_CHAR + ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, & + 0, & ! set data on vertices + data) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to set frac tag ') + + deallocate(data) + + ! define tags + ierr = iMOAB_DefineTagStorage( mrofid, trim(seq_flds_x2r_fields)//C_NULL_CHAR, & + 1, & ! dense, double + 1, & ! number of components + tagindex ) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to create seq_flds_x2r_fields tags ') + + ierr = iMOAB_DefineTagStorage( mrofid, trim(seq_flds_r2x_fields)//C_NULL_CHAR, & + 1, & ! dense, double + 1, & ! number of components + tagindex ) + if (ierr > 0 ) & + call shr_sys_abort('Error: fail to create seq_flds_r2x_fields tags ') + mbrof_data = .true. ! will have effects +#ifdef MOABDEBUG + ! debug test + outfile = 'RofDataMesh.h5m'//C_NULL_CHAR + wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! + ! write out the mesh file to disk + ierr = iMOAB_WriteMesh(mrofid, trim(outfile), trim(wopts)) + if (ierr .ne. 0) then + call shr_sys_abort(subname//' ERROR in writing data mesh rof ') + endif +#endif +#endif !---------------------------------------------------------------------------- ! Initialize MCT attribute vectors !---------------------------------------------------------------------------- @@ -256,6 +393,13 @@ subroutine drof_comp_run(EClock, x2r, r2x, & SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, & inst_suffix, logunit, case_name) +#ifdef MOABDEBUG + use iMOAB, only: iMOAB_WriteMesh +#endif +#ifdef HAVE_MOAB + use seq_flds_mod , only: seq_flds_r2x_fields + use seq_flds_mod , only: moab_set_tag_from_av +#endif ! !DESCRIPTION: run method for drof model implicit none @@ -285,7 +429,18 @@ subroutine drof_comp_run(EClock, x2r, r2x, & integer(IN) :: nu ! 
unit number integer(IN) :: nflds_r2x character(len=18) :: date_str +#ifdef HAVE_MOAB + real(R8), allocatable, target :: datam(:) + type(mct_list) :: temp_list + integer :: size_list, index_list + type(mct_string) :: mctOStr ! + character*400 tagname, mct_field +#ifdef MOABDEBUG + integer :: cur_drof_stepno, ierr + character*100 outfile, wopts, lnum +#endif +#endif character(*), parameter :: F00 = "('(drof_comp_run) ',8a)" character(*), parameter :: F04 = "('(drof_comp_run) ',2a,2i8,'s')" character(*), parameter :: subName = "(drof_comp_run) " @@ -384,6 +539,32 @@ subroutine drof_comp_run(EClock, x2r, r2x, & !---------------------------------------------------------------------------- ! Log output for model date !---------------------------------------------------------------------------- +#ifdef HAVE_MOAB + lsize = mct_avect_lsize(r2x) ! is it the same as mct_avect_lsize(avstrm) ? + allocate(datam(lsize)) ! + call mct_list_init(temp_list ,seq_flds_r2x_fields) + size_list=mct_list_nitem (temp_list) + do index_list = 1, size_list + call mct_list_get(mctOStr,index_list,temp_list) + mct_field = mct_string_toChar(mctOStr) + tagname= trim(mct_field)//C_NULL_CHAR + call moab_set_tag_from_av(tagname, r2x, index_list, mrofid, datam, lsize) ! loop over all a2x fields, not just a few + enddo + call mct_list_clean(temp_list) + deallocate(datam) ! maybe we should keep it around, deallocate at the final only? 
+ +#ifdef MOABDEBUG + call seq_timemgr_EClockGetData( EClock, stepno=cur_drof_stepno ) + write(lnum,"(I0.2)")cur_drof_stepno + outfile = 'drof_comp_run_'//trim(lnum)//'.h5m'//C_NULL_CHAR + wopts = 'PARALLEL=WRITE_PART'//C_NULL_CHAR + ierr = iMOAB_WriteMesh(mrofid, outfile, wopts) + if (ierr > 0 ) then + write(logunit,*) 'Failed to write data rof component state ' + endif +#endif + +#endif call t_startf('drof_run2') if (my_task == master_task) then diff --git a/components/data_comps/drof/src/rof_comp_mct.F90 b/components/data_comps/drof/src/rof_comp_mct.F90 index bafdc6d3f988..7257028e7b52 100644 --- a/components/data_comps/drof/src/rof_comp_mct.F90 +++ b/components/data_comps/drof/src/rof_comp_mct.F90 @@ -16,7 +16,11 @@ module rof_comp_mct use drof_comp_mod , only: drof_comp_init, drof_comp_run, drof_comp_final use drof_shr_mod , only: drof_shr_read_namelists use seq_flds_mod , only: seq_flds_x2r_fields, seq_flds_r2x_fields - +#ifdef HAVE_MOAB + use seq_comm_mct, only : mrofid ! iMOAB app id for rof + use iso_c_binding + use iMOAB , only: iMOAB_RegisterApplication +#endif ! !PUBLIC TYPES: implicit none private ! except @@ -53,6 +57,9 @@ module rof_comp_mct !=============================================================================== subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename ) +#ifdef HAVE_MOAB + use shr_stream_mod, only: shr_stream_getDomainInfo, shr_stream_getFile +#endif ! !DESCRIPTION: initialize drof model implicit none @@ -74,6 +81,16 @@ subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename ) integer(IN) :: shrloglev ! original log level logical :: read_restart ! start from restart integer(IN) :: ierr ! error code + + character(CL) :: filePath ! generic file path + character(CL) :: fileName ! generic file name + character(CS) :: timeName ! domain file: time variable name + character(CS) :: lonName ! domain file: lon variable name + character(CS) :: latName ! domain file: lat variable name + character(CS) :: hgtName ! 
domain file: hgt variable name + character(CS) :: maskName ! domain file: mask variable name + character(CS) :: areaName ! domain file: area variable name + character(*), parameter :: subName = "(rof_init_mct) " !------------------------------------------------------------------------------- @@ -140,11 +157,28 @@ subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename ) ! Initialize drof !---------------------------------------------------------------------------- +#ifdef HAVE_MOAB + ierr = iMOAB_RegisterApplication(trim("DROF")//C_NULL_CHAR, mpicom, compid, mrofid) + if (ierr .ne. 0) then + write(logunit,*) subname,' error in registering data rof comp' + call shr_sys_abort(subname//' ERROR in registering data rof comp') + endif +#endif + call drof_comp_init(Eclock, x2r, r2x, & seq_flds_x2r_fields, seq_flds_r2x_fields, & SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, & inst_suffix, inst_name, logunit, read_restart) - +#ifdef HAVE_MOAB + if (my_task == master_task) then + call shr_stream_getDomainInfo(SDROF%stream(1), filePath,fileName,timeName,lonName, & + latName,hgtName,maskName,areaName) + call shr_stream_getFile(filePath,fileName) + ! send path of river domain to MOAB coupler. + call seq_infodata_PutData( infodata, rof_domain=fileName) + write(logunit,*), ' filename: ', filename + endif +#endif !---------------------------------------------------------------------------- ! Fill infodata that needs to be returned from drof !---------------------------------------------------------------------------- diff --git a/driver-moab/main/cplcomp_exchange_mod.F90 b/driver-moab/main/cplcomp_exchange_mod.F90 index 2fd8bb1f5f60..51b37b2e9904 100644 --- a/driver-moab/main/cplcomp_exchange_mod.F90 +++ b/driver-moab/main/cplcomp_exchange_mod.F90 @@ -1017,7 +1017,7 @@ subroutine cplcomp_moab_Init(infodata,comp) integer :: mpigrp_old ! 
component group pes integer :: ierr, context_id character*200 :: appname, outfile, wopts, ropts - character(CL) :: rtm_mesh + character(CL) :: rtm_mesh, rof_domain character(CL) :: lnd_domain character(CL) :: ocn_domain character(CL) :: atm_mesh @@ -1633,10 +1633,14 @@ subroutine cplcomp_moab_Init(infodata,comp) ierr = iMOAB_RegisterApplication(trim(appname), mpicom_new, id_join, mbrxid) ! load mesh from scrip file passed from river model - call seq_infodata_GetData(infodata,rof_mesh=rtm_mesh) - outfile = trim(rtm_mesh)//C_NULL_CHAR - ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN'//C_NULL_CHAR - + call seq_infodata_GetData(infodata,rof_mesh=rtm_mesh,rof_domain=rof_domain) + if ( trim(rof_domain) == 'none' ) then + outfile = trim(rtm_mesh)//C_NULL_CHAR + ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN'//C_NULL_CHAR + else + outfile = trim(rof_domain)//C_NULL_CHAR + ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE='//C_NULL_CHAR + endif nghlay = 0 ! no ghost layers ierr = iMOAB_LoadMesh(mbrxid, outfile, ropts, nghlay) if ( ierr .ne. 0 ) then diff --git a/driver-moab/shr/seq_comm_mct.F90 b/driver-moab/shr/seq_comm_mct.F90 index 10a23b9c5094..44d29e91ad2c 100644 --- a/driver-moab/shr/seq_comm_mct.F90 +++ b/driver-moab/shr/seq_comm_mct.F90 @@ -241,6 +241,7 @@ module seq_comm_mct integer, public :: mbrmapro ! iMOAB id for read map between river and ocean; it exists on coupler PEs ! similar to intx id, oa, la; integer, public :: mbrxoid ! iMOAB id for rof migrated to coupler for ocean context (r2o mapping) + logical, public :: mbrof_data = .false. ! made true if no rtm mesh, which means data rof ? integer, public :: mbintxar ! iMOAB id for intx mesh between atm and river integer, public :: mbintxlr ! iMOAB id for intx mesh between land and river integer, public :: mbintxrl ! 
iMOAB id for intx mesh between river and land diff --git a/driver-moab/shr/seq_infodata_mod.F90 b/driver-moab/shr/seq_infodata_mod.F90 index 77236e027cc3..2e9838bc69c5 100644 --- a/driver-moab/shr/seq_infodata_mod.F90 +++ b/driver-moab/shr/seq_infodata_mod.F90 @@ -232,6 +232,7 @@ MODULE seq_infodata_mod integer(SHR_KIND_IN) :: iac_ny ! nx, ny of "2d" grid character(SHR_KIND_CL) :: lnd_domain ! path to land domain file character(SHR_KIND_CL) :: rof_mesh ! path to river mesh file + character(SHR_KIND_CL) :: rof_domain ! path to river domain file; only for data rof for now character(SHR_KIND_CL) :: ocn_domain ! path to ocean domain file, used by data ocean models only character(SHR_KIND_CL) :: atm_mesh ! path to atmosphere domain/mesh file, used by data atm models only @@ -792,6 +793,7 @@ SUBROUTINE seq_infodata_Init( infodata, nmlfile, ID, pioid, cpl_tag) infodata%iac_ny = 0 infodata%lnd_domain = 'none' infodata%rof_mesh = 'none' + infodata%rof_domain = 'none' infodata%ocn_domain = 'none' ! will be used for ocean data models only; will be used as a signal infodata%atm_mesh = 'none' ! will be used for atmosphere data models only; will be used as a signal ! 
not sure if it exists always actually @@ -1037,8 +1039,8 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_ glc_phase, rof_phase, atm_phase, lnd_phase, ocn_phase, ice_phase, & wav_phase, iac_phase, esp_phase, wav_nx, wav_ny, atm_nx, atm_ny, & lnd_nx, lnd_ny, rof_nx, rof_ny, ice_nx, ice_ny, ocn_nx, ocn_ny, & - iac_nx, iac_ny, glc_nx, glc_ny, lnd_domain, rof_mesh, ocn_domain, & - atm_mesh, eps_frac, & + iac_nx, iac_ny, glc_nx, glc_ny, lnd_domain, rof_mesh, rof_domain, & + ocn_domain, atm_mesh, eps_frac, & eps_amask, eps_agrid, eps_aarea, eps_omask, eps_ogrid, eps_oarea, & reprosum_use_ddpdd, reprosum_allow_infnan, & reprosum_diffmax, reprosum_recompute, & @@ -1212,6 +1214,7 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_ integer(SHR_KIND_IN), optional, intent(OUT) :: iac_ny character(SHR_KIND_CL), optional, intent(OUT) :: lnd_domain character(SHR_KIND_CL), optional, intent(OUT) :: rof_mesh + character(SHR_KIND_CL), optional, intent(OUT) :: rof_domain character(SHR_KIND_CL), optional, intent(OUT) :: ocn_domain character(SHR_KIND_CL), optional, intent(OUT) :: atm_mesh @@ -1401,6 +1404,7 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_ if ( present(iac_ny) ) iac_ny = infodata%iac_ny if ( present(lnd_domain) ) lnd_domain = infodata%lnd_domain if ( present(rof_mesh) ) rof_mesh = infodata%rof_mesh + if ( present(rof_domain) ) rof_domain = infodata%rof_domain if ( present(ocn_domain) ) ocn_domain = infodata%ocn_domain if ( present(atm_mesh) ) atm_mesh = infodata%atm_mesh @@ -1598,8 +1602,8 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_ wav_phase, iac_phase, esp_phase, wav_nx, wav_ny, atm_nx, atm_ny, & lnd_nx, lnd_ny, rof_nx, rof_ny, ice_nx, ice_ny, ocn_nx, ocn_ny, & iac_nx, iac_ny, glc_nx, glc_ny, eps_frac, eps_amask, lnd_domain, & - rof_mesh, ocn_domain, atm_mesh, eps_agrid, eps_aarea, eps_omask, & - eps_ogrid, eps_oarea, & + 
rof_mesh, rof_domain, ocn_domain, atm_mesh, eps_agrid, eps_aarea, & + eps_omask, eps_ogrid, eps_oarea, & reprosum_use_ddpdd, reprosum_allow_infnan, & reprosum_diffmax, reprosum_recompute, & mct_usealltoall, mct_usevector, glc_valid_input, nlmaps_verbosity) @@ -1771,6 +1775,7 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_ integer(SHR_KIND_IN), optional, intent(IN) :: iac_ny character(SHR_KIND_CL), optional, intent(IN) :: lnd_domain character(SHR_KIND_CL), optional, intent(IN) :: rof_mesh + character(SHR_KIND_CL), optional, intent(IN) :: rof_domain character(SHR_KIND_CL), optional, intent(IN) :: ocn_domain character(SHR_KIND_CL), optional, intent(IN) :: atm_mesh @@ -1959,6 +1964,7 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_ if ( present(iac_ny) ) infodata%iac_ny = iac_ny if ( present(lnd_domain) ) infodata%lnd_domain = lnd_domain if ( present(rof_mesh) ) infodata%rof_mesh = rof_mesh + if ( present(rof_domain) ) infodata%rof_domain = rof_domain if ( present(ocn_domain) ) infodata%ocn_domain = ocn_domain if ( present(atm_mesh) ) infodata%atm_mesh = atm_mesh @@ -2271,6 +2277,7 @@ subroutine seq_infodata_bcast(infodata,mpicom) call shr_mpi_bcast(infodata%iac_ny, mpicom) call shr_mpi_bcast(infodata%lnd_domain, mpicom) call shr_mpi_bcast(infodata%rof_mesh, mpicom) + call shr_mpi_bcast(infodata%rof_domain, mpicom) call shr_mpi_bcast(infodata%ocn_domain, mpicom) call shr_mpi_bcast(infodata%atm_mesh, mpicom) call shr_mpi_bcast(infodata%nextsw_cday, mpicom) @@ -2518,6 +2525,7 @@ subroutine seq_infodata_Exchange(infodata,ID,type) call shr_mpi_bcast(infodata%rof_ny, mpicom, pebcast=cmppe) call shr_mpi_bcast(infodata%flood_present, mpicom, pebcast=cmppe) call shr_mpi_bcast(infodata%rof_mesh, mpicom, pebcast=cmppe) + call shr_mpi_bcast(infodata%rof_domain, mpicom, pebcast=cmppe) ! 
dead_comps is true if it's ever set to true deads = infodata%dead_comps call shr_mpi_bcast(deads, mpicom, pebcast=cmppe) @@ -2990,6 +2998,7 @@ SUBROUTINE seq_infodata_print( infodata ) write(logunit,F0I) subname,'iac_ny = ', infodata%iac_ny write(logunit,F0I) subname,'lnd_domain = ', infodata%lnd_domain write(logunit,F0I) subname,'rof_mesh = ', infodata%rof_mesh + write(logunit,F0I) subname,'rof_domain = ', infodata%rof_domain write(logunit,F0I) subname,'ocn_domain = ', infodata%ocn_domain write(logunit,F0I) subname,'atm_mesh = ', infodata%atm_mesh From 14737fe365b556bc69ad67d8f5430d49d5a1930a Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Wed, 17 Apr 2024 13:52:47 -0500 Subject: [PATCH 25/40] use repartition (zoltan) for domain meshes it depends on a moab branch: iulian07/domain_extension --- driver-moab/main/cplcomp_exchange_mod.F90 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/driver-moab/main/cplcomp_exchange_mod.F90 b/driver-moab/main/cplcomp_exchange_mod.F90 index 51b37b2e9904..476b846a66df 100644 --- a/driver-moab/main/cplcomp_exchange_mod.F90 +++ b/driver-moab/main/cplcomp_exchange_mod.F90 @@ -1112,7 +1112,7 @@ subroutine cplcomp_moab_Init(infodata,comp) else ! we need to read the atm mesh on coupler, from domain file ierr = iMOAB_LoadMesh(mbaxid, trim(atm_mesh)//C_NULL_CHAR, & - "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0) + "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION;NO_CULLING", 0) if ( ierr /= 0 ) then write(logunit,*) 'Failed to load atm domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load atm domain mesh on coupler ') @@ -1290,7 +1290,7 @@ subroutine cplcomp_moab_Init(infodata,comp) else ! 
we need to read the ocean mesh on coupler, from domain file ierr = iMOAB_LoadMesh(mboxid, trim(ocn_domain)//C_NULL_CHAR, & - "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0) + "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;NO_CULLING;REPARTITION", 0) if ( ierr /= 0 ) then write(logunit,*) 'Failed to load ocean domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load ocean domain mesh on coupler ') @@ -1399,7 +1399,7 @@ subroutine cplcomp_moab_Init(infodata,comp) else ! we need to read the ocean mesh on coupler, from domain file ierr = iMOAB_LoadMesh(mbofxid, trim(ocn_domain)//C_NULL_CHAR, & - "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0) + "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;NO_CULLING;REPARTITION", 0) if ( ierr /= 0 ) then write(logunit,*) 'Failed to load second ocean domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load second ocean domain mesh on coupler ') @@ -1445,7 +1445,7 @@ subroutine cplcomp_moab_Init(infodata,comp) ! do not receive the mesh anymore, read it from file, then pair it with mlnid, component land PC mesh ! similar to rof mosart mesh - ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE='//C_NULL_CHAR + ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION'//C_NULL_CHAR call seq_infodata_GetData(infodata,lnd_domain=lnd_domain) outfile = trim(lnd_domain)//C_NULL_CHAR nghlay = 0 ! no ghost layers @@ -1639,7 +1639,7 @@ subroutine cplcomp_moab_Init(infodata,comp) ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN'//C_NULL_CHAR else outfile = trim(rof_domain)//C_NULL_CHAR - ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE='//C_NULL_CHAR + ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION'//C_NULL_CHAR endif nghlay = 0 ! 
no ghost layers ierr = iMOAB_LoadMesh(mbrxid, outfile, ropts, nghlay) From c2ab50280e9f7384c0ac5e1a976628000153114c Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Thu, 18 Apr 2024 16:21:55 -0500 Subject: [PATCH 26/40] record in the cpl.log the mesh/domain files used by moab coupler --- driver-moab/main/cplcomp_exchange_mod.F90 | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/driver-moab/main/cplcomp_exchange_mod.F90 b/driver-moab/main/cplcomp_exchange_mod.F90 index 476b846a66df..0e2682192697 100644 --- a/driver-moab/main/cplcomp_exchange_mod.F90 +++ b/driver-moab/main/cplcomp_exchange_mod.F90 @@ -1117,6 +1117,9 @@ subroutine cplcomp_moab_Init(infodata,comp) write(logunit,*) 'Failed to load atm domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load atm domain mesh on coupler ') endif + if (seq_comm_iamroot(CPLID)) then + write(logunit,'(A)') subname//' load atm domain mesh from file '//trim(atm_mesh) + endif ! right now, turn atm_pg_active to true atm_pg_active = .true. ! FIXME TODO ! need to add global id tag to the app, it will be used in restart @@ -1295,6 +1298,9 @@ subroutine cplcomp_moab_Init(infodata,comp) write(logunit,*) 'Failed to load ocean domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load ocean domain mesh on coupler ') endif + if (seq_comm_iamroot(CPLID)) then + write(logunit,'(A)') subname//' load ocn domain mesh from file '//trim(ocn_domain) + endif ! need to add global id tag to the app, it will be used in restart tagtype = 0 ! dense, integer numco = 1 @@ -1404,6 +1410,9 @@ subroutine cplcomp_moab_Init(infodata,comp) write(logunit,*) 'Failed to load second ocean domain mesh on coupler' call shr_sys_abort(subname//' ERROR Failed to load second ocean domain mesh on coupler ') endif + if (seq_comm_iamroot(CPLID)) then + write(logunit,'(A)') subname//' load ocn domain mesh from file for second ocn instance '//trim(ocn_domain) + endif ! 
need to add global id tag to the app, it will be used in restart tagtype = 0 ! dense, integer numco = 1 @@ -1458,6 +1467,9 @@ subroutine cplcomp_moab_Init(infodata,comp) write(logunit,*) subname,' error in reading land coupler mesh from ', trim(lnd_domain) call shr_sys_abort(subname//' ERROR in reading land coupler mesh') endif + if (seq_comm_iamroot(CPLID)) then + write(logunit,'(A)') subname//' load lnd domain mesh from file '//trim(lnd_domain) + endif ! need to add global id tag to the app, it will be used in restart tagtype = 0 ! dense, integer numco = 1 @@ -1643,9 +1655,13 @@ subroutine cplcomp_moab_Init(infodata,comp) endif nghlay = 0 ! no ghost layers ierr = iMOAB_LoadMesh(mbrxid, outfile, ropts, nghlay) + if (seq_comm_iamroot(CPLID)) then + write(logunit,'(A)') subname//' load rof from file '//trim(outfile) + endif if ( ierr .ne. 0 ) then call shr_sys_abort( subname//' ERROR: cannot read rof mesh on coupler' ) end if + ! need to add global id tag to the app, it will be used in restart tagtype = 0 ! 
dense, integer numco = 1 From 9a229db9677b525347236194495bb8bcbec74d15 Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Mon, 22 Apr 2024 08:21:22 -0500 Subject: [PATCH 27/40] change message in comparison MOABCOMP on cpl --- driver-moab/main/component_type_mod.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/driver-moab/main/component_type_mod.F90 b/driver-moab/main/component_type_mod.F90 index 85a300356c9a..ebb3ad8f58e3 100644 --- a/driver-moab/main/component_type_mod.F90 +++ b/driver-moab/main/component_type_mod.F90 @@ -508,7 +508,7 @@ subroutine compare_mct_av_moab_tag(comp, attrVect, mct_field, appId, tagname, en difference = sqrt(differenceg) iamroot = seq_comm_iamroot(CPLID) if ( iamroot ) then - print * , subname, trim(comp%ntype), ' comp, difference on tag ', trim(tagname), ' = ', difference + print * , subname, trim(comp%ntype), ' on cpl, difference on tag ', trim(tagname), ' = ', difference !call shr_sys_abort(subname//'differences between mct and moab values') endif deallocate(GlobalIds) From 5bfe5c99e347054d7abcd312e694948994172c18 Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Mon, 22 Apr 2024 08:36:44 -0500 Subject: [PATCH 28/40] iMOAB_MergeVertices is not used --- components/elm/src/cpl/lnd_comp_mct.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/elm/src/cpl/lnd_comp_mct.F90 b/components/elm/src/cpl/lnd_comp_mct.F90 index 6b006b5d9188..e533d5bcbc2b 100644 --- a/components/elm/src/cpl/lnd_comp_mct.F90 +++ b/components/elm/src/cpl/lnd_comp_mct.F90 @@ -846,7 +846,7 @@ subroutine init_moab_land(bounds, LNDID) use spmdmod , only: masterproc use iMOAB , only: iMOAB_CreateVertices, iMOAB_WriteMesh, iMOAB_RegisterApplication, & iMOAB_DefineTagStorage, iMOAB_SetIntTagStorage, iMOAB_SetDoubleTagStorage, & - iMOAB_ResolveSharedEntities, iMOAB_CreateElements, iMOAB_MergeVertices, iMOAB_UpdateMeshInfo + iMOAB_ResolveSharedEntities, iMOAB_CreateElements, iMOAB_UpdateMeshInfo type(bounds_type) , 
intent(in) :: bounds integer , intent(in) :: LNDID ! id of the land app From 6d84b34cf457e9dd82bd548892dfdaf80501849b Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Mon, 22 Apr 2024 08:37:37 -0500 Subject: [PATCH 29/40] add more debug for rof 2 ocn projections not clear yet why projection has wrong results for rofi --- driver-moab/main/prep_ocn_mod.F90 | 39 ++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/driver-moab/main/prep_ocn_mod.F90 b/driver-moab/main/prep_ocn_mod.F90 index 7514cc40db01..74c6936eda15 100644 --- a/driver-moab/main/prep_ocn_mod.F90 +++ b/driver-moab/main/prep_ocn_mod.F90 @@ -1985,6 +1985,9 @@ subroutine prep_ocn_mrg_moab(infodata, xao_ox) outfile = 'OcnCplAftMm'//trim(lnum)//'.h5m'//C_NULL_CHAR wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) + if (ierr .ne. 0) then + call shr_sys_abort(subname//' error in writing ocean after merging') + endif endif #endif if (first_time) then @@ -2846,7 +2849,17 @@ subroutine prep_ocn_calc_r2x_ox(timer) !--------------------------------------------------------------- ! Description ! Create r2x_ox (note that r2x_ox is a local module variable) - ! +#ifdef MOABDEBUG + use iMOAB, only : iMOAB_WriteMesh + use seq_comm_mct, only: num_moab_exports ! used to count the steps for moab files +#endif + ! Arguments + + ! Local Variables +#ifdef MOABDEBUG + character*32 :: outfile, wopts, lnum + integer :: ierr +#endif ! Arguments character(len=*), intent(in) :: timer ! @@ -2858,17 +2871,37 @@ subroutine prep_ocn_calc_r2x_ox(timer) call t_drvstartf (trim(timer),barrier=mpicom_CPLID) do eri = 1,num_inst_rof +#ifdef MOABDEBUG + if (mboxid .ge. 0 ) then ! we are on coupler pes, for sure + write(lnum,"(I0.2)") num_moab_exports + outfile = 'OcnCpl_Bef_R2O_'//trim(lnum)//'.h5m'//C_NULL_CHAR + wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! + ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) + if (ierr .ne. 
0) then + call shr_sys_abort(subname//' error in writing ocean after Rof 2 ocn proj') + endif + endif +#endif r2x_rx => component_get_c2x_cx(rof(eri)) call seq_map_map(mapper_Rr2o_liq, r2x_rx, r2x_ox(eri), & fldlist=seq_flds_r2o_liq_fluxes, norm=.false.) - call seq_map_map(mapper_Rr2o_ice, r2x_rx, r2x_ox(eri), & fldlist=seq_flds_r2o_ice_fluxes, norm=.false.) - if (flood_present) then call seq_map_map(mapper_Fr2o, r2x_rx, r2x_ox(eri), & fldlist='Flrr_flood', norm=.true.) endif +#ifdef MOABDEBUG + if (mboxid .ge. 0 ) then ! we are on coupler pes, for sure + write(lnum,"(I0.2)") num_moab_exports + outfile = 'OcnCpl_Aft_R2O_'//trim(lnum)//'.h5m'//C_NULL_CHAR + wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! + ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) + if (ierr .ne. 0) then + call shr_sys_abort(subname//' error in writing ocean after Rof 2 ocn proj') + endif + endif +#endif enddo call t_drvstopf (trim(timer)) From 023f963f6c73e7e8a3f9fa617c8ccbd8b596bb75 Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Mon, 22 Apr 2024 17:04:57 -0500 Subject: [PATCH 30/40] compare Forr_rofi fields in mct and moab the moab one has extra -1s after projection which is wrong the issue is how we project the map from file we do or we do not consider fractions, masks? --- driver-moab/main/prep_ocn_mod.F90 | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/driver-moab/main/prep_ocn_mod.F90 b/driver-moab/main/prep_ocn_mod.F90 index 74c6936eda15..4ca803150801 100644 --- a/driver-moab/main/prep_ocn_mod.F90 +++ b/driver-moab/main/prep_ocn_mod.F90 @@ -2859,6 +2859,11 @@ subroutine prep_ocn_calc_r2x_ox(timer) #ifdef MOABDEBUG character*32 :: outfile, wopts, lnum integer :: ierr +#endif +#ifdef MOABCOMP + character*100 :: tagname, mct_field + integer :: ent_type + real*8 :: difference #endif ! Arguments character(len=*), intent(in) :: timer @@ -2878,7 +2883,7 @@ subroutine prep_ocn_calc_r2x_ox(timer) wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! 
ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) if (ierr .ne. 0) then - call shr_sys_abort(subname//' error in writing ocean after Rof 2 ocn proj') + call shr_sys_abort(subname//' error in writing ocean before Rof 2 ocn proj') endif endif #endif @@ -2896,6 +2901,14 @@ subroutine prep_ocn_calc_r2x_ox(timer) write(lnum,"(I0.2)") num_moab_exports outfile = 'OcnCpl_Aft_R2O_'//trim(lnum)//'.h5m'//C_NULL_CHAR wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! + +#ifdef MOABCOMP + ent_type = 1 ! cell for ocean + mct_field = 'Forr_rofi' + tagname= 'Forr_rofi'//C_NULL_CHAR + call compare_mct_av_moab_tag(ocn(1), r2x_ox(1), mct_field, mboxid, tagname, ent_type, difference, .true.) +#endif + ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) if (ierr .ne. 0) then call shr_sys_abort(subname//' error in writing ocean after Rof 2 ocn proj') From 102b54d7bae0a0f63f02e76a5cab1eaf40304aaa Mon Sep 17 00:00:00 2001 From: Iulian Grindeanu Date: Thu, 9 May 2024 14:26:48 -0500 Subject: [PATCH 31/40] remove some debugging code it actually failed on chrysalis, on 4 nodes not clear why, maybe distribution issues, or parameter existence not checked This code looked at projection of Forr_rofi on ocean --- driver-moab/main/prep_ocn_mod.F90 | 30 ------------------------------ 1 file changed, 30 deletions(-) diff --git a/driver-moab/main/prep_ocn_mod.F90 b/driver-moab/main/prep_ocn_mod.F90 index 4ca803150801..44ec0c6a3c7a 100644 --- a/driver-moab/main/prep_ocn_mod.F90 +++ b/driver-moab/main/prep_ocn_mod.F90 @@ -2876,17 +2876,6 @@ subroutine prep_ocn_calc_r2x_ox(timer) call t_drvstartf (trim(timer),barrier=mpicom_CPLID) do eri = 1,num_inst_rof -#ifdef MOABDEBUG - if (mboxid .ge. 0 ) then ! we are on coupler pes, for sure - write(lnum,"(I0.2)") num_moab_exports - outfile = 'OcnCpl_Bef_R2O_'//trim(lnum)//'.h5m'//C_NULL_CHAR - wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! - ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) - if (ierr .ne. 
0) then - call shr_sys_abort(subname//' error in writing ocean before Rof 2 ocn proj') - endif - endif -#endif r2x_rx => component_get_c2x_cx(rof(eri)) call seq_map_map(mapper_Rr2o_liq, r2x_rx, r2x_ox(eri), & fldlist=seq_flds_r2o_liq_fluxes, norm=.false.) @@ -2896,25 +2885,6 @@ subroutine prep_ocn_calc_r2x_ox(timer) call seq_map_map(mapper_Fr2o, r2x_rx, r2x_ox(eri), & fldlist='Flrr_flood', norm=.true.) endif -#ifdef MOABDEBUG - if (mboxid .ge. 0 ) then ! we are on coupler pes, for sure - write(lnum,"(I0.2)") num_moab_exports - outfile = 'OcnCpl_Aft_R2O_'//trim(lnum)//'.h5m'//C_NULL_CHAR - wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR ! - -#ifdef MOABCOMP - ent_type = 1 ! cell for ocean - mct_field = 'Forr_rofi' - tagname= 'Forr_rofi'//C_NULL_CHAR - call compare_mct_av_moab_tag(ocn(1), r2x_ox(1), mct_field, mboxid, tagname, ent_type, difference, .true.) -#endif - - ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts)) - if (ierr .ne. 0) then - call shr_sys_abort(subname//' error in writing ocean after Rof 2 ocn proj') - endif - endif -#endif enddo call t_drvstopf (trim(timer)) From e97804082cb9ba44e1dcede5c3e05f6dacb2783d Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Wed, 12 Jun 2024 14:03:55 -0500 Subject: [PATCH 32/40] Make bld files consistent with Registry changes --- .../bld/namelist_files/namelist_definition_mpaso.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml index cd8535a41587..2d31cbf5acf7 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml @@ -2737,7 +2737,7 @@ Default: Defined in namelist_defaults.xml category="tracer_forcing_activeTracers" group="tracer_forcing_activeTracers"> Time interval to compute salinity restoring tendency. 
-Valid values: Any valid time stamp or 'dt' +Valid values: Any valid time stamp e.g. format '0000-00-01_00:00:00' or 'dt' Default: Defined in namelist_defaults.xml @@ -2759,7 +2759,7 @@ Default: Defined in namelist_defaults.xml -Flag to enable salinity restoring under sea ice. The default setting is false, where salinity restoring tapers from full restoring in the open ocean (iceFraction=0.0) to zero restoring below full sea ice coverage (iceFraction=1.0); below partial sea ice coverage, restoring is in proportion to iceFraction. If true, full salinity restoring is used everywhere, regardless of iceFraction value +Flag to enable salinity restoring under sea ice. The default setting is true, meaning that full salinity restoring is used everywhere, regardless of iceFraction value, except under ice shelves. If false, the salinity restoring tapers from full restoring in the open ocean (iceFraction=0.0) to zero restoring below full sea ice coverage (iceFraction=1.0); below partial sea ice coverage, restoring is in proportion to iceFraction. Valid values: .true. or .false. 
Default: Defined in namelist_defaults.xml From 5f37229a2c50e6da2d832bc11f38120a939faeef Mon Sep 17 00:00:00 2001 From: noel Date: Thu, 13 Jun 2024 09:02:05 -0700 Subject: [PATCH 33/40] add CMAKE_CUDA_ARCHITECTURES as in gnugpu make same change for muller-gpu --- cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake | 8 ++++++++ .../machines/cmake_macros/nvidiagpu_muller-gpu.cmake | 5 +++-- cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake | 3 ++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake b/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake index 17c8066083bb..68f6648c9306 100644 --- a/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake +++ b/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake @@ -10,6 +10,14 @@ string(APPEND KOKKOS_OPTIONS " -DKokkos_ARCH_AMPERE80=On -DKokkos_ENABLE_CUDA=On set(CMAKE_CUDA_ARCHITECTURES "80") string(APPEND CMAKE_C_FLAGS_RELEASE " -O2") string(APPEND CMAKE_Fortran_FLAGS_RELEASE " -O2") + +#if (COMP_NAME STREQUAL elm) +# # don't build with ACC, default for GNU is not not build with ACC +#else() +# string(APPEND CMAKE_Fortran_FLAGS " -fopenacc") +# string(APPEND CMAKE_EXE_LINKER_FLAGS " -fopenacc") +#endif() + set(MPICC "cc") set(MPICXX "CC") set(MPIFC "ftn") diff --git a/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake b/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake index 59758d59989c..93c7cdd16b21 100644 --- a/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake +++ b/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake @@ -6,8 +6,9 @@ if (COMP_NAME STREQUAL gptl) endif() string(APPEND CPPDEFS " -DTHRUST_IGNORE_CUB_VERSION_CHECK") string(APPEND CMAKE_CUDA_FLAGS " -ccbin CC -O2 -arch sm_80 --use_fast_math") -string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel") +set(CMAKE_CUDA_ARCHITECTURES "80") +string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel") +string(APPEND 
CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc80 -Minfo=accel") set(SCC "cc") set(SCXX "CC") set(SFC "ftn") -string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel") diff --git a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake index 852483663a09..93c7cdd16b21 100644 --- a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake +++ b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake @@ -6,8 +6,9 @@ if (COMP_NAME STREQUAL gptl) endif() string(APPEND CPPDEFS " -DTHRUST_IGNORE_CUB_VERSION_CHECK") string(APPEND CMAKE_CUDA_FLAGS " -ccbin CC -O2 -arch sm_80 --use_fast_math") +set(CMAKE_CUDA_ARCHITECTURES "80") +string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel") string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc80 -Minfo=accel") set(SCC "cc") set(SCXX "CC") set(SFC "ftn") -string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel") From 578f1bd140b93653a54742d4db18c8e824509047 Mon Sep 17 00:00:00 2001 From: noel Date: Thu, 13 Jun 2024 09:03:51 -0700 Subject: [PATCH 34/40] remove comments accidentally committed --- cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake | 8 -------- 1 file changed, 8 deletions(-) diff --git a/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake b/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake index 68f6648c9306..17c8066083bb 100644 --- a/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake +++ b/cime_config/machines/cmake_macros/gnugpu_pm-gpu.cmake @@ -10,14 +10,6 @@ string(APPEND KOKKOS_OPTIONS " -DKokkos_ARCH_AMPERE80=On -DKokkos_ENABLE_CUDA=On set(CMAKE_CUDA_ARCHITECTURES "80") string(APPEND CMAKE_C_FLAGS_RELEASE " -O2") string(APPEND CMAKE_Fortran_FLAGS_RELEASE " -O2") - -#if (COMP_NAME STREQUAL elm) -# # don't build with ACC, default for GNU is not not build with ACC -#else() -# string(APPEND CMAKE_Fortran_FLAGS " -fopenacc") -# string(APPEND CMAKE_EXE_LINKER_FLAGS " -fopenacc") -#endif() - set(MPICC "cc") set(MPICXX "CC") 
set(MPIFC "ftn") From 9911e969cd92349fc4634d4648189993d5c13384 Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Thu, 13 Jun 2024 11:33:23 -0500 Subject: [PATCH 35/40] Change default for config_salinity_restoring_under_sea_ice --- .../mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml index 4590e8c3c8a1..98c5b18ce179 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml @@ -663,7 +663,7 @@ 'dt' 1.585e-6 0.5 -.true. +.false. .false. From fc123ed59d5d1c52660aef7152006a335d3497e4 Mon Sep 17 00:00:00 2001 From: Jon Wolfe Date: Thu, 13 Jun 2024 12:44:35 -0500 Subject: [PATCH 36/40] Update defaults for RRSwISC6to18E3r5 to match ongoing tests --- cime_config/config_grids.xml | 12 ++++++------ .../bld/namelist_files/namelist_defaults_mpaso.xml | 4 +++- driver-mct/cime_config/config_component_e3sm.xml | 1 + 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml index 05ddf741d27c..0d0ed53b9490 100755 --- a/cime_config/config_grids.xml +++ b/cime_config/config_grids.xml @@ -3220,7 +3220,7 @@ 1440 720 $DIN_LOC_ROOT/share/domains/domain.lnd.r025_IcoswISC30E3r5.240129.nc - $DIN_LOC_ROOT/share/domains/domain.lnd.r025_RRSwISC6to18E3r5.240328.nc + $DIN_LOC_ROOT/share/domains/domain.lnd.r025_RRSwISC6to18E3r5.240402.nc r025 is 1/4 degree river routing grid: @@ -3938,8 +3938,8 @@ cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_traave.20240328.nc cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trfv2.20240206.nc cpl/gridmaps/ne120pg2/map_ne120pg2_to_r025_trbilin.20240328.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_traave.20240328.nc - cpl/gridmaps/ne120pg2/map_r025_to_ne120pg2_trbilin.20240328.nc + 
cpl/gridmaps/r025/map_r025_to_ne120pg2_traave.20240328.nc + cpl/gridmaps/r025/map_r025_to_ne120pg2_traave.20240328.nc @@ -5219,7 +5219,7 @@ - cpl/cpl6/map_JRA025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_JRA025_to_RRSwISC6to18E3r5_cstmnn.r250e1250_58NS.20240328.nc cpl/cpl6/map_JRA025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc @@ -5314,7 +5314,7 @@ - cpl/cpl6/map_r05_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_r05_to_RRSwISC6to18E3r5.cstmnn.r250e1250_58NS.20240328.nc cpl/cpl6/map_r05_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc @@ -5324,7 +5324,7 @@ - cpl/cpl6/map_r025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc + cpl/cpl6/map_r025_to_RRSwISC6to18E3r5.cstmnn.r250e1250_58NS.20240328.nc cpl/cpl6/map_r025_to_RRSwISC6to18E3r5_cstmnn.r50e100.20240328.nc diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml index ffd45353943e..6b9b14eb4e85 100644 --- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml +++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml @@ -106,6 +106,7 @@ .true. .true. .true. +.true. 10.0 1000.0 1000.0 @@ -121,6 +122,7 @@ 154.0 77.0 38.5 +100.0 .false. 10.0 @@ -565,7 +567,7 @@ '0000_00:00:05' '0000_00:00:02.5' '0000_00:00:01.25' -'0000_00:00:10' +'0000_00:00:05' 2 .true. 
2 diff --git a/driver-mct/cime_config/config_component_e3sm.xml b/driver-mct/cime_config/config_component_e3sm.xml index 2895650d2426..2a52c9fb7c30 100755 --- a/driver-mct/cime_config/config_component_e3sm.xml +++ b/driver-mct/cime_config/config_component_e3sm.xml @@ -245,6 +245,7 @@ CESM1_MOD CESM1_MOD RASM_OPTION1 + RASM_OPTION2 run_coupling env_run.xml From ed57f1c5740216cd5cd1023ecbe2be59298d0b78 Mon Sep 17 00:00:00 2001 From: Gautam Bisht Date: Sat, 11 May 2024 14:11:50 -0700 Subject: [PATCH 37/40] Updates ELM subroutine to include dt as an input The `dt` is now included as an input to the subroutine `RootDynamics()`. Fixes #5913 [BFB] --- components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 | 2 +- components/elm/src/biogeochem/EcosystemDynMod.F90 | 2 +- components/elm/src/biogeochem/RootDynamicsMod.F90 | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 b/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 index c3f877f48964..7ff65fe8f1f0 100644 --- a/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 +++ b/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 @@ -298,7 +298,7 @@ subroutine CNEcosystemDynBetr(bounds, & call RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & canopystate_vars, cnstate_vars, & - crop_vars, energyflux_vars, soilstate_vars) + crop_vars, energyflux_vars, soilstate_vars, dt) call t_stopf('RootDynamics') end if diff --git a/components/elm/src/biogeochem/EcosystemDynMod.F90 b/components/elm/src/biogeochem/EcosystemDynMod.F90 index f814ca9f3678..32dd9cf6afe3 100644 --- a/components/elm/src/biogeochem/EcosystemDynMod.F90 +++ b/components/elm/src/biogeochem/EcosystemDynMod.F90 @@ -639,7 +639,7 @@ subroutine EcosystemDynNoLeaching2(bounds, & call RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & canopystate_vars, & - cnstate_vars, crop_vars, energyflux_vars, soilstate_vars) + cnstate_vars, crop_vars, 
energyflux_vars, soilstate_vars, dt) call t_stop_lnd(event) end if diff --git a/components/elm/src/biogeochem/RootDynamicsMod.F90 b/components/elm/src/biogeochem/RootDynamicsMod.F90 index 79f4c1bb67b7..3c78043aa6ac 100644 --- a/components/elm/src/biogeochem/RootDynamicsMod.F90 +++ b/components/elm/src/biogeochem/RootDynamicsMod.F90 @@ -39,7 +39,7 @@ module RootDynamicsMod ! subroutine RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & canopystate_vars, & - cnstate_vars, crop_vars, energyflux_vars, soilstate_vars) + cnstate_vars, crop_vars, energyflux_vars, soilstate_vars, dt) ! ! !DESCRIPTION: ! This routine determine the fine root distribution @@ -62,12 +62,12 @@ subroutine RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp type(crop_type) , intent(in) :: crop_vars type(energyflux_type) , intent(in) :: energyflux_vars type(soilstate_type) , intent(inout) :: soilstate_vars + real(r8) , intent(in) :: dt ! radiation time step delta t (seconds) ! ! !LOCAL VARIABLES: integer :: f,c,p,lev,j ! indices - real(r8) :: dt ! radiation time step delta t (seconds) real(r8) :: w_limit(bounds%begp:bounds%endp) ! soil water weighting factor real(r8) :: rswa(bounds%begp:bounds%endp,1:nlevgrnd) ! soil water availability in each soil layer real(r8) :: rsmn(bounds%begp:bounds%endp,1:nlevgrnd) ! soil nitrogen availability in each soil layer From cd7f69423b0ba1fa7a0faaf1a7629b30fe428a0c Mon Sep 17 00:00:00 2001 From: James Foucar Date: Mon, 17 Jun 2024 11:03:04 -0600 Subject: [PATCH 38/40] Update CIME submodule ... to f903115718ebc30669ce557f511abaef231a1d88 Fixes: 1) Several fixes/changes to containerized CI. No impact to E3SM 2) fix an issue with single component runs which do use mediator 3) Fix ERI test type: need to make sure ref2 run dir exists 4) Fixes config ignoring paths containing "tests" 5) More robust approach to waiting for many threads, fixes a bug where archiving threads block test reporting. 
6) The xmlchange tool should not raise an exception when a diff is detected. Changes: 1) New feature: long grid names 2) Update documentation 3) Refactors how/when rebuilds are required. Reduce scenarios in which a full rebuild is required 4) Remove reference to cice5 and ww3dev, they are no longer used [BFB] --- cime | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime b/cime index 4388509869bd..f903115718eb 160000 --- a/cime +++ b/cime @@ -1 +1 @@ -Subproject commit 4388509869bd5988d6315e2da65b1a2fbfa604fa +Subproject commit f903115718ebc30669ce557f511abaef231a1d88 From 254955c3259f9baab3b561cd03421b63539342eb Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 3 May 2024 15:45:32 -0500 Subject: [PATCH 39/40] Add shared E3SM constants to MPAS-Ocean init mode --- components/mpas-ocean/src/mode_init/shr_const_mod.F | 1 + components/mpas-ocean/src/mode_init/shr_kind_mod.F | 1 + 2 files changed, 2 insertions(+) create mode 120000 components/mpas-ocean/src/mode_init/shr_const_mod.F create mode 120000 components/mpas-ocean/src/mode_init/shr_kind_mod.F diff --git a/components/mpas-ocean/src/mode_init/shr_const_mod.F b/components/mpas-ocean/src/mode_init/shr_const_mod.F new file mode 120000 index 000000000000..c471e79113fd --- /dev/null +++ b/components/mpas-ocean/src/mode_init/shr_const_mod.F @@ -0,0 +1 @@ +../../../../share/util/shr_const_mod.F90 \ No newline at end of file diff --git a/components/mpas-ocean/src/mode_init/shr_kind_mod.F b/components/mpas-ocean/src/mode_init/shr_kind_mod.F new file mode 120000 index 000000000000..77a61f967b6a --- /dev/null +++ b/components/mpas-ocean/src/mode_init/shr_kind_mod.F @@ -0,0 +1 @@ +../../../../share/util/shr_kind_mod.F90 \ No newline at end of file From 2c73596b056c0b85388b064773eb15a1c5b1e8d0 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 3 May 2024 16:30:28 -0500 Subject: [PATCH 40/40] Switch global ocean in init mode to E3SM shared constants This merge also migrates the computation of 
the coriolis parameter to using E3SM shared constants. The unused constants module imports are removed from several init mode utilities to ensure that they don't use constants that are inconsistent with E3SM's versions. --- components/mpas-ocean/src/mode_init/Makefile | 4 +- .../mode_init/mpas_ocn_init_cell_markers.F | 3 - .../mode_init/mpas_ocn_init_global_ocean.F | 61 ++++++++++--------- .../mode_init/mpas_ocn_init_interpolation.F | 3 - .../mode_init/mpas_ocn_init_spherical_utils.F | 10 +-- .../mpas_ocn_init_ssh_and_landIcePressure.F | 2 - .../mode_init/mpas_ocn_init_vertical_grids.F | 2 - 7 files changed, 40 insertions(+), 45 deletions(-) diff --git a/components/mpas-ocean/src/mode_init/Makefile b/components/mpas-ocean/src/mode_init/Makefile index 2f16f1973183..1b1f61b637fe 100644 --- a/components/mpas-ocean/src/mode_init/Makefile +++ b/components/mpas-ocean/src/mode_init/Makefile @@ -2,7 +2,9 @@ OBJS = mpas_ocn_init_mode.o -UTILS = mpas_ocn_init_spherical_utils.o \ +UTILS = shr_kind_mod.o \ + shr_const_mod.o \ + mpas_ocn_init_spherical_utils.o \ mpas_ocn_init_vertical_grids.o \ mpas_ocn_init_cell_markers.o \ mpas_ocn_init_interpolation.o \ diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_cell_markers.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_cell_markers.F index ac2d3465757d..2aed74c10ab4 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_cell_markers.F +++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_cell_markers.F @@ -22,11 +22,8 @@ module ocn_init_cell_markers use mpas_kind_types use mpas_derived_types use mpas_pool_routines - use mpas_constants use mpas_timer - use ocn_constants - implicit none private diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_global_ocean.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_global_ocean.F index 0b810b193122..a972cf71381c 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_global_ocean.F +++ 
b/components/mpas-ocean/src/mode_init/mpas_ocn_init_global_ocean.F @@ -20,18 +20,19 @@ module ocn_init_global_ocean + use shr_kind_mod, only: SHR_KIND_R8 + use shr_const_mod + use mpas_kind_types use mpas_io_units use mpas_derived_types use mpas_pool_routines - use mpas_constants use mpas_io use mpas_io_streams use mpas_stream_manager use mpas_timekeeping use mpas_dmpar - use ocn_constants use ocn_config use ocn_diagnostics_variables use ocn_init_cell_markers @@ -632,8 +633,8 @@ subroutine ocn_init_setup_global_ocean_read_topo(domain, iErr)!{{{ call MPAS_closeStream(topographyStream) if (config_global_ocean_topography_latlon_degrees) then - topoLat % array(:) = topoLat % array(:) * pii / 180.0_RKIND - topoLon % array(:) = topoLon % array(:) * pii / 180.0_RKIND + topoLat % array(:) = topoLat % array(:) * SHR_CONST_PI / 180.0_RKIND + topoLon % array(:) = topoLon % array(:) * SHR_CONST_PI / 180.0_RKIND end if end subroutine ocn_init_setup_global_ocean_read_topo!}}} @@ -825,26 +826,26 @@ subroutine ocn_init_setup_global_ocean_create_model_topo(domain, iErr)!{{{ call ocn_init_interpolation_nearest_horiz(topoLon % array, topoLat % array, & topoIC % array, nLonTopo, nLatTopo, & lonCell, latCell, bottomDepthObserved, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) if (config_global_ocean_topography_has_ocean_frac) then call ocn_init_interpolation_nearest_horiz(topoLon % array, topoLat % array, & oceanFracIC % array, nLonTopo, nLatTopo, & lonCell, latCell, oceanFracObserved, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) end if elseif (config_global_ocean_topography_method == "bilinear_interpolation") then call ocn_init_interpolation_bilinear_horiz(topoLon % array, topoLat % array, & topoIC % array, nLonTopo, nLatTopo, & lonCell, latCell, bottomDepthObserved, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) if (config_global_ocean_topography_has_ocean_frac) then call 
ocn_init_interpolation_bilinear_horiz(topoLon % array, topoLat % array, & oceanFracIC % array, nLonTopo, nLatTopo, & lonCell, latCell, oceanFracObserved, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) end if else @@ -1642,11 +1643,11 @@ subroutine ocn_init_setup_global_ocean_read_tracer_lat_lon(domain, iErr)!{{{ if (config_global_ocean_tracer_latlon_degrees) then do iLat = 1, nLatTracer - tracerLat % array(iLat) = tracerLat % array(iLat) * pii / 180.0_RKIND + tracerLat % array(iLat) = tracerLat % array(iLat) * SHR_CONST_PI / 180.0_RKIND end do do iLon = 1, nLonTracer - tracerLon % array(iLon) = tracerLon % array(iLon) * pii / 180.0_RKIND + tracerLon % array(iLon) = tracerLon % array(iLon) * SHR_CONST_PI / 180.0_RKIND end do end if @@ -1714,11 +1715,11 @@ subroutine ocn_init_setup_global_ocean_read_swData_lat_lon(domain, iErr)!{{{ if (config_global_ocean_swData_latlon_degrees) then do iLat = 1, nLatSW - swDataLat % array(iLat) = swDataLat % array(iLat) * pii / 180.0_RKIND + swDataLat % array(iLat) = swDataLat % array(iLat) * SHR_CONST_PI / 180.0_RKIND end do do iLon = 1, nLonSW - swDataLon % array(iLon) = swDataLon % array(iLon) * pii / 180.0_RKIND + swDataLon % array(iLon) = swDataLon % array(iLon) * SHR_CONST_PI / 180.0_RKIND end do end if @@ -1801,11 +1802,11 @@ subroutine ocn_init_setup_global_ocean_read_ecosys_lat_lon(domain, iErr)!{{{ if (config_global_ocean_ecosys_latlon_degrees) then do iLat = 1, nLatEcosys - tracerLat % array(iLat) = tracerLat % array(iLat) * pii / 180.0_RKIND + tracerLat % array(iLat) = tracerLat % array(iLat) * SHR_CONST_PI / 180.0_RKIND end do do iLon = 1, nLonEcosys - tracerLon % array(iLon) = tracerLon % array(iLon) * pii / 180.0_RKIND + tracerLon % array(iLon) = tracerLon % array(iLon) * SHR_CONST_PI / 180.0_RKIND end do end if @@ -2072,13 +2073,13 @@ subroutine ocn_init_setup_global_ocean_interpolate_tracers(domain, tracerArray, call ocn_init_interpolation_nearest_horiz(tracerLon % array, tracerLat % 
array, & tracerIC % array, nLonTracer, nLatTracer, & lonCell, latCell, interpTracer, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) elseif (config_global_ocean_tracer_method .eq. "bilinear_interpolation") then call ocn_init_interpolation_bilinear_horiz(tracerLon % array, tracerLat % array, & tracerIC % array, nLonTracer, nLatTracer, & lonCell, latCell, interpTracer, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) else call mpas_log_write( 'Invalid choice of config_global_ocean_tracer_method.', MPAS_LOG_CRIT) @@ -2239,13 +2240,13 @@ subroutine ocn_init_setup_global_ocean_interpolate_ecosys_forcing(domain, fieldN call ocn_init_interpolation_nearest_horiz(tracerLon % array, tracerLat % array, & ecosysForcingIC % array(:,:,timeCounter), nLonEcosys, nLatEcosys, & lonCell, latCell, ecosysForcingField, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) elseif (config_global_ocean_ecosys_method .eq. 
"bilinear_interpolation") then call ocn_init_interpolation_bilinear_horiz(tracerLon % array, tracerLat % array, & ecosysForcingIC % array(:,:,timeCounter), nLonEcosys, nLatEcosys, & lonCell, latCell, ecosysForcingField, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) else call mpas_log_write( 'Invalid choice of config_global_ocean_ecosys_method.', MPAS_LOG_CRIT) iErr = 1 @@ -2449,33 +2450,33 @@ subroutine ocn_init_setup_global_ocean_interpolate_swData(domain, iErr)!{{{ call ocn_init_interpolation_nearest_horiz(swDataLon % array, swDataLat % array, & chlorophyllIC % array, nLonSW, nLatSW, & lonCell, latCell, chlorophyllData, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) call ocn_init_interpolation_nearest_horiz(swDataLon % array, swDataLat % array, & zenithAngleIC % array, nLonSW, nLatSW, & lonCell, latCell, zenithAngle, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) call ocn_init_interpolation_nearest_horiz(swDataLon % array, swDataLat % array, & clearSKYIC % array, nLonSW, nLatSW, & lonCell, latCell, clearSkyRadiation, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) elseif (config_global_ocean_swData_method .eq. 
"bilinear_interpolation") then call ocn_init_interpolation_bilinear_horiz(swDataLon % array, swDataLat % array, & chlorophyllIC % array, nLonSW, nLatSW, & lonCell, latCell, chlorophyllData, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) call ocn_init_interpolation_bilinear_horiz(swDataLon % array, swDataLat % array, & zenithAngleIC % array, nLonSW, nLatSW, & lonCell, latCell, zenithAngle, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) call ocn_init_interpolation_bilinear_horiz(swDataLon % array, swDataLat % array, & clearSKYIC % array, nLonSW, nLatSW, & lonCell, latCell, clearSkyRadiation, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) else call mpas_log_write( 'Invalid choice of config_global_ocean_swData_method.', MPAS_LOG_CRIT) iErr = 1 @@ -2603,8 +2604,8 @@ subroutine ocn_init_setup_global_ocean_read_windstress(domain, iErr)!{{{ call MPAS_closeStream(windstressStream) if (config_global_ocean_windstress_latlon_degrees) then - windLat % array(:) = windLat % array(:) * pii / 180.0_RKIND - windLon % array(:) = windLon % array(:) * pii / 180.0_RKIND + windLat % array(:) = windLat % array(:) * SHR_CONST_PI / 180.0_RKIND + windLon % array(:) = windLon % array(:) * SHR_CONST_PI / 180.0_RKIND end if end subroutine ocn_init_setup_global_ocean_read_windstress!}}} @@ -2657,23 +2658,23 @@ subroutine ocn_init_setup_global_ocean_interpolate_windstress(domain, iErr)!{{{ call ocn_init_interpolation_nearest_horiz(windLon % array, windLat % array, & zonalWindIC % array, nLonWind, nLatWind, & lonCell, latCell, windStressZonal, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) call ocn_init_interpolation_nearest_horiz(windLon % array, windLat % array, & meridionalWindIC % array, nLonWind, nLatWind, & lonCell, latCell, windStressMeridional, nCells, & - inXPeriod = 2.0_RKIND * pii) + inXPeriod = 2.0_RKIND * SHR_CONST_PI) elseif 
(config_global_ocean_windstress_method .eq. "bilinear_interpolation") then call ocn_init_interpolation_bilinear_horiz(windLon % array, windLat % array, & zonalWindIC % array, nLonWind, nLatWind, & lonCell, latCell, windStressZonal, nCells, & - inXPeriod = 2.0_RKIND*pii) + inXPeriod = 2.0_RKIND*SHR_CONST_PI) call ocn_init_interpolation_bilinear_horiz(windLon % array, windLat % array, & meridionalWindIC % array, nLonWind, nLatWind, & lonCell, latCell, windStressMeridional, nCells, & - inXPeriod = 2.0_RKIND*pii) + inXPeriod = 2.0_RKIND*SHR_CONST_PI) else call mpas_log_write( 'Invalid choice of config_global_ocean_windstress_method.', MPAS_LOG_CRIT) diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_interpolation.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_interpolation.F index 7510357606f4..3ff4df4b4dad 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_interpolation.F +++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_interpolation.F @@ -22,11 +22,8 @@ module ocn_init_interpolation use mpas_kind_types use mpas_derived_types use mpas_pool_routines - use mpas_constants use mpas_timer - use ocn_constants - implicit none private save diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_spherical_utils.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_spherical_utils.F index d24c182e8c0a..4ccd920ddf9f 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_spherical_utils.F +++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_spherical_utils.F @@ -19,11 +19,13 @@ module ocn_init_spherical_utils + use shr_kind_mod, only: SHR_KIND_R8 + use shr_const_mod + use mpas_kind_types use mpas_io_units use mpas_derived_types use mpas_pool_routines - use mpas_constants use mpas_stream_manager use ocn_config @@ -293,15 +295,15 @@ subroutine ocn_init_realistic_coriolis_parameter(domain, err)!{{{ call mpas_pool_get_array(meshPool, 'fVertex', fVertex) do iCell = 1, nCellsSolve - fCell(iCell) = 2.0_RKIND * omega * 
sin(latCell(iCell)) + fCell(iCell) = 2.0_RKIND * SHR_CONST_OMEGA * sin(latCell(iCell)) end do do iVertex = 1, nVerticesSolve - fVertex(iVertex) = 2.0_RKIND * omega * sin( latVertex(iVertex) ) + fVertex(iVertex) = 2.0_RKIND * SHR_CONST_OMEGA * sin( latVertex(iVertex) ) end do do iEdge = 1, nEdgesSolve - fEdge(iEdge) = 2.0_RKIND * omega * sin( latEdge(iEdge) ) + fEdge(iEdge) = 2.0_RKIND * SHR_CONST_OMEGA * sin( latEdge(iEdge) ) end do block_ptr => block_ptr % next diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F index c85249635a7d..dfa7c6b430b6 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F +++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F @@ -25,9 +25,7 @@ module ocn_init_ssh_and_landIcePressure use mpas_io_units use mpas_derived_types use mpas_pool_routines - use mpas_constants - use ocn_constants use ocn_config use ocn_diagnostics_variables diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F index 2fcbf507f7a3..e28cf5e97fad 100644 --- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F +++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F @@ -22,11 +22,9 @@ module ocn_init_vertical_grids use mpas_kind_types use mpas_derived_types use mpas_pool_routines - use mpas_constants use mpas_timer use mpas_io - use ocn_constants use ocn_config use ocn_diagnostics_variables